update cookbook
jalexanderII committed Dec 21, 2023
1 parent 0275928 commit cd12e0c
Showing 13 changed files with 41 additions and 41 deletions.
3 changes: 1 addition & 2 deletions parea/api_client.py
@@ -3,10 +3,9 @@
import httpx


# http://127.0.0.1:8000/api/parea/v1/trace_log
class HTTPClient:
_instance = None
base_url = "http://127.0.0.1:8000/api/parea/v1" # "https://parea-ai-backend-e2adf7624bcb3980.onporter.run/api/parea/v1"
base_url = "https://parea-ai-backend-e2adf7624bcb3980.onporter.run/api/parea/v1"
api_key = None

def __new__(cls, *args, **kwargs):
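The hunk above only swaps the base_url default over to the hosted backend; the surrounding HTTPClient is a singleton (note the _instance guard in __new__), so every consumer shares one client pointed at that URL. A minimal sketch of what that implies — illustrative, not part of the commit:

from parea.api_client import HTTPClient

# __new__ caches a single instance, so repeated construction returns the same object.
client_a = HTTPClient()
client_b = HTTPClient()
assert client_a is client_b
assert client_a.base_url.startswith("https://parea-ai-backend")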
38 changes: 25 additions & 13 deletions parea/cookbook/langchain/trace_langchain_RAG_evals.py
@@ -31,7 +31,7 @@
load_dotenv()

# Need to instantiate Parea for tracing and evals
p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))


class DocumentRetriever:
@@ -87,33 +87,45 @@ def __init__(self, retriever, model: str):
| response_generator
)

def get_chain(self):
return self.chain

def get_context(self) -> str:
"""Helper to get the context from a retrieval chain, so we can use it for evaluation metrics."""
return self.context

def _format_docs(self, docs) -> str:
context = "\n\n".join(doc.page_content for doc in docs)
# set context as an attribute, so we can access it later
self.context = context
return context

def get_chain(self):
return self.chain


# EXAMPLE EVALUATION TEST CASES
eval_questions = [
# "What is the population of New York City as of 2020?",
"What is the population of New York City as of 2020?",
"Which borough of New York City has the highest population? Only respond with the name of the borough.",
# "What is the economic significance of New York City?",
# "How did New York City get its name?",
# "What is the significance of the Statue of Liberty in New York City?",
"What is the economic significance of New York City?",
"How did New York City get its name?",
"What is the significance of the Statue of Liberty in New York City?",
]

eval_answers = [
# "8,804,190",
"8,804,190",
"Brooklyn",
# "New York City's economic significance is vast, as it serves as the global financial capital, housing Wall Street and major financial institutions. Its diverse economy spans technology, media, healthcare, education, and more, making it resilient to economic fluctuations. NYC is a hub for international business, attracting global companies, and boasts a large, skilled labor force. Its real estate market, tourism, cultural industries, and educational institutions further fuel its economic prowess. The city's transportation network and global influence amplify its impact on the world stage, solidifying its status as a vital economic player and cultural epicenter.",
# "New York City got its name when it came under British control in 1664. King Charles II of England granted the lands to his brother, the Duke of York, who named the city New York in his own honor.",
# "The Statue of Liberty in New York City holds great significance as a symbol of the United States and its ideals of liberty and peace. It greeted millions of immigrants who arrived in the U.S. by ship in the late 19th and early 20th centuries, representing hope and freedom for those seeking a better life. It has since become an iconic landmark and a global symbol of cultural diversity and freedom.",
"""New York City's economic significance is vast, as it serves as the global financial capital, housing Wall
Street and major financial institutions. Its diverse economy spans technology, media, healthcare, education,
and more, making it resilient to economic fluctuations. NYC is a hub for international business, attracting
global companies, and boasts a large, skilled labor force. Its real estate market, tourism, cultural industries,
and educational institutions further fuel its economic prowess. The city's transportation network and global
influence amplify its impact on the world stage, solidifying its status as a vital economic player and cultural
epicenter.""",
"""New York City got its name when it came under British control in 1664. King Charles II of England granted the
lands to his brother, the Duke of York, who named the city New York in his own honor.""",
"""The Statue of Liberty in New York City holds great significance as a symbol of the United States and its
ideals of liberty and peace. It greeted millions of immigrants who arrived in the U.S. by ship in the late 19th
and early 20th centuries, representing hope and freedom for those seeking a better life. It has since become an
iconic landmark and a global symbol of cultural diversity and freedom.""",
]

# set up evaluation functions we want to test, provide the name of the relevant fields needed for each eval function
@@ -160,7 +172,7 @@ def main():
# build log component needed for evaluation metric functions
log = create_log(model, question, context, output, answer)

# run evaluation metric functions in a thread to avoid blocking return of chain
# helper function to run evaluation metrics in a thread to avoid blocking return of chain
run_evals_in_thread_and_log(trace_id=str(parent_trace_id), log=log, eval_funcs=EVALS, verbose=True)


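For orientation, here is a hypothetical sketch of how the EVALS list referenced in the comment above might be assembled and passed to run_evals_in_thread_and_log. The import path of EvalFuncTuple and its field names are assumptions, not taken from this commit:

from parea.evals.utils import EvalFuncTuple, run_evals_in_thread_and_log  # assumed import path


def answer_matches_target(log) -> float:
    """Toy eval: score 1.0 if the reference answer appears verbatim in the model output."""
    return float((log.target or "").lower() in (log.output or "").lower())


# Assumed shape: each tuple pairs a display name with an eval callable that takes a Log.
EVALS = [EvalFuncTuple(name="matches_target", func=answer_matches_target)]

# Inside main(), as shown in the hunk above:
#   log = create_log(model, question, context, output, answer)
#   run_evals_in_thread_and_log(trace_id=str(parent_trace_id), log=log, eval_funcs=EVALS, verbose=True)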
@@ -10,10 +10,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))

os.environ["LANGCHAIN_PROJECT"] = "rag-example"

p = Parea(api_key=os.getenv("PAREA_API_KEY"))

model = AnthropicFunctions(model="claude-2")

@@ -44,9 +41,8 @@
},
"required": ["name", "height"],
}
inp = """
Alex is 5 feet tall. Claudia is 1 feet taller Alex and jumps higher than him. Claudia is a brunette and Alex is blonde.
"""
inp = """Alex is 5 feet tall. Claudia is 1 feet taller Alex and jumps higher than him. Claudia is a brunette and Alex
is blonde."""

chain = create_extraction_chain(schema, model)

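A brief usage sketch for the extraction chain set up above (not part of the diff), assuming the LangChain chain API of the era — create_extraction_chain returns a chain whose run() yields records matching the schema:

result = chain.run(inp)
# e.g. a list of dicts containing the schema's "name" and "height" fields — illustrative output only
print(result)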
5 changes: 1 addition & 4 deletions parea/cookbook/langchain/trace_langchain_rag_agents.py
@@ -13,10 +13,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))

os.environ["LANGCHAIN_PROJECT"] = "rag-example"

p = Parea(api_key=os.getenv("PAREA_API_KEY"))

loader = TextLoader("../data/state_of_the_union.txt")

@@ -16,9 +16,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))

os.environ["LANGCHAIN_PROJECT"] = "rag-example"
p = Parea(api_key=os.getenv("PAREA_API_KEY"))

loader = WebBaseLoader(
web_paths=("https://lilianweng.github.io/posts/2023-06-23-agent/",),
2 changes: 1 addition & 1 deletion parea/cookbook/tracing_with_agent.py
@@ -12,7 +12,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))

# Parea SDK makes it easy to use different LLMs with the same apis structure and standardized request/response schemas.
LLM_OPTIONS = [("gpt-3.5-turbo", "openai"), ("gpt-4", "openai"), ("claude-instant-1", "anthropic"), ("claude-2", "anthropic")]
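The comment above is the point of LLM_OPTIONS: the same completion request shape works across providers. A hedged sketch of such a call — the schema class names follow the Parea cookbook's usual pattern but are assumptions here, not shown in this diff:

from parea.schemas.models import Completion, LLMInputs, Message, ModelParams, Role  # assumed imports


def ask(model: str, provider: str, content: str) -> str:
    # Identical request shape for every vendor; only model/provider change.
    return p.completion(
        Completion(
            llm_configuration=LLMInputs(
                model=model,
                provider=provider,
                model_params=ModelParams(temp=0.0),
                messages=[Message(role=Role.user, content=content)],
            )
        )
    ).content


for model, provider in LLM_OPTIONS:
    print(ask(model, provider, "Summarize the agent's task in one sentence."))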
4 changes: 1 addition & 3 deletions parea/cookbook/tracing_with_deployed_prompt.py
@@ -1,5 +1,3 @@
from typing import Tuple

import json
import os
from datetime import datetime
@@ -13,7 +11,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))


def deployed_argument_generator(query: str, additional_description: str = "") -> str:
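deployed_argument_generator's body is collapsed in this view. As a hedged, standalone illustration (not the file's actual code), invoking a prompt deployed in Parea typically means passing a deployment id plus the prompt's input variables:

from parea.schemas.models import Completion  # assumed import


def call_deployed_prompt(query: str, additional_description: str = "") -> str:
    return p.completion(
        Completion(
            deployment_id="p-XXXXXXXX",  # placeholder; the real id comes from the Parea app
            llm_inputs={"query": query, "additional_description": additional_description},  # variable names assumed
        )
    ).content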
2 changes: 1 addition & 1 deletion parea/cookbook/tracing_with_open_ai_endpoint_directly.py
@@ -12,7 +12,7 @@

openai.api_key = os.getenv("OPENAI_API_KEY")

p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))


def call_llm(data: list[dict], model: str = "gpt-3.5-turbo", temperature: float = 0.0) -> str:
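Only call_llm's signature survives the fold above. A plausible body, assuming the pre-v1 openai client that the module's openai.api_key assignment implies — a sketch, not the file's actual implementation:

def call_llm(data: list[dict], model: str = "gpt-3.5-turbo", temperature: float = 0.0) -> str:
    # `data` is the chat history as a list of {"role": ..., "content": ...} dicts.
    response = openai.ChatCompletion.create(model=model, messages=data, temperature=temperature)
    return response.choices[0].message["content"]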
2 changes: 1 addition & 1 deletion parea/cookbook/tracing_with_openai_with_functions.py
@@ -15,7 +15,7 @@
PLACES_URL = "https://maps.googleapis.com/maps/api/place/nearbysearch/json"
openai.api_key = os.getenv("OPENAI_API_KEY")

p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))

functions = [
{
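The functions list that opens above holds OpenAI function-calling schemas, presumably wrapping the Places endpoint. A generic hedged sketch of the pattern with the pre-v1 chat API — the schema below is illustrative, not the file's:

example_functions = [
    {
        "name": "find_nearby_places",  # hypothetical tool name, for illustration only
        "description": "Search for places near a lat/lng using the Places API.",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {"type": "string", "description": "latitude,longitude"},
                "keyword": {"type": "string"},
            },
            "required": ["location"],
        },
    }
]

# The model decides whether to emit a function_call for one of the schemas above.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo-0613",
    messages=[{"role": "user", "content": "Find a coffee shop near Times Square."}],
    functions=example_functions,
    function_call="auto",
)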
2 changes: 1 addition & 1 deletion parea/cookbook/tracing_without_deployed_prompt.py
@@ -10,7 +10,7 @@

load_dotenv()

p = Parea(api_key=os.getenv("DEV_API_KEY"))
p = Parea(api_key=os.getenv("PAREA_API_KEY"))


@trace # <--- If you want to log the inputs to the LLM call you can optionally add a trace decorator here
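As the inline comment notes, @trace is optional here; it records the decorated function's inputs and output as a parent trace around any nested calls. A hedged, self-contained sketch of the nesting it enables — the import path and function names are assumptions, not taken from this file:

from parea.utils.trace_utils import trace  # assumed import path for the decorator


@trace
def shout(text: str) -> str:
    return text.upper()


@trace  # parent span: logs echo_loudly's inputs/outputs, with shout() nested beneath it
def echo_loudly(text: str) -> str:
    return shout(text) + "!"


print(echo_loudly("hello parea"))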
4 changes: 2 additions & 2 deletions parea/evals/utils.py
@@ -1,4 +1,4 @@
from typing import Callable, Dict, List, Union
from typing import Callable, Union

import json
import warnings
@@ -106,7 +106,7 @@ def make_evaluations(trace_id: str, log: Log, eval_funcs: list[EvalFuncTuple], v
for score in scores:
print(score)
parea_logger.update_log(data=UpdateLog(trace_id=trace_id, field_name_to_value_map={"scores": scores, "target": log.target}))
print(f"View trace at: http://localhost:3000/logs/detailed/{trace_id} \n")
print(f"View trace at: https://app.parea.ai/logs/detailed/{trace_id} \n")


def run_evals_in_thread_and_log(trace_id: str, log: Log, eval_funcs: list[EvalFuncTuple], verbose: bool = False):
4 changes: 2 additions & 2 deletions parea/parea_logger.py
@@ -59,14 +59,14 @@ def record_vendor_log(self, data: dict[str, Any], vendor: TraceIntegrations) ->
self._client.request(
"POST",
VENDOR_LOG_ENDPOINT.format(vendor=vendor.value),
data=json.loads(json_dumps(data)),
data=json.loads(json_dumps(data)), # uuid is not serializable
)

async def arecord_vendor_log(self, data: dict[str, Any], vendor: TraceIntegrations) -> None:
await self._client.request_async(
"POST",
VENDOR_LOG_ENDPOINT.format(vendor=vendor.value),
data=json.loads(json_dumps(data)),
data=json.loads(json_dumps(data)), # uuid is not serializable
)


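The new comments explain why the payload is round-tripped through json_dumps before posting: trace payloads carry uuid.UUID values, which the stdlib encoder rejects, so dumping with a UUID-aware serializer and re-loading presumably leaves plain strings the HTTP client can send. A standalone illustration of the pattern (stdlib only, not Parea's actual json_dumps):

import json
import uuid

payload = {"trace_id": uuid.uuid4()}

# json.dumps(payload) would raise TypeError: Object of type UUID is not JSON serializable.
# Coercing unknown types to str and re-loading yields a plain, sendable dict:
safe_payload = json.loads(json.dumps(payload, default=str))
print(safe_payload)  # {'trace_id': '<uuid as string>'}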
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "parea-ai"
packages = [{ include = "parea" }]
version = "0.2.23"
version = "0.2.24"
description = "Parea python sdk"
readme = "README.md"
authors = ["joel-parea-ai <[email protected]>"]
