fix(openai): support new model in costs
jalexanderII committed Apr 10, 2024
1 parent fbf500e commit cdcfce9
Showing 3 changed files with 39 additions and 9 deletions.
30 changes: 30 additions & 0 deletions parea/constants.py
@@ -105,6 +105,11 @@ def str2bool(v):
"completion": 60.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-1106-vision-preview": {
"prompt": 30.0,
"completion": 60.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-turbo-preview": {
"prompt": 10.0,
"completion": 30.0,
@@ -120,6 +125,16 @@ def str2bool(v):
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-turbo": {
"prompt": 10.0,
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-turbo-2024-04-09": {
"prompt": 10.0,
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
}
AZURE_MODEL_INFO: Dict[str, Dict[str, Union[float, int, Dict[str, int]]]] = {
"gpt-35-turbo": {
@@ -192,11 +207,26 @@ def str2bool(v):
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-1106-vision-preview": {
"prompt": 30.0,
"completion": 60.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-35-turbo-instruct": {
"prompt": 10.0,
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-turbo": {
"prompt": 10.0,
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
"gpt-4-turbo-2024-04-09": {
"prompt": 10.0,
"completion": 30.0,
"token_limit": {"max_completion_tokens": 4096, "max_prompt_tokens": 128000},
},
}

instant_pricing = {"prompt": 1.63, "completion": 5.51}
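For context, the new entries mirror the shape of the existing ones: per-model prompt and completion rates plus a token_limit block. The figures appear to be USD per one million tokens (gpt-4-turbo at 10.0/30.0 matches OpenAI's published per-1M pricing), though that unit is an inference, not stated in this diff. Below is a minimal sketch of how such a table could be used to estimate request cost; the estimate_cost helper is illustrative and not part of this commit.

def estimate_cost(model_info: dict, model: str, prompt_tokens: int, completion_tokens: int) -> float:
    # Look up the per-model rates shown above; assumes the values are USD per 1M tokens.
    rates = model_info[model]
    return (prompt_tokens / 1_000_000) * rates["prompt"] + (completion_tokens / 1_000_000) * rates["completion"]

# Example with the newly added model (pass the cost table from parea/constants.py;
# the exact name of the non-Azure dict is not visible in this hunk):
#   estimate_cost(model_info, "gpt-4-turbo-2024-04-09", 1200, 350)
#   -> 1200/1e6 * 10.0 + 350/1e6 * 30.0 = 0.0225 USD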
16 changes: 8 additions & 8 deletions parea/cookbook/tracing_with_open_ai_endpoint_directly.py
@@ -17,7 +17,7 @@
p.wrap_openai_client(client)


def call_llm(data: List[dict], model: str = "gpt-3.5-turbo", temperature: float = 0.0) -> str:
def call_llm(data: List[dict], model: str = "gpt-4-turbo", temperature: float = 0.0) -> str:
return client.chat.completions.create(model=model, temperature=temperature, messages=data).choices[0].message.content


@@ -82,7 +82,7 @@ def argument_chain(query: str, additional_description: str = "") -> Tuple[str, s
@trace
def json_call() -> str:
completion = client.chat.completions.create(
model="gpt-3.5-turbo-0125",
model="gpt-4-turbo-2024-04-09",
messages=[{"role": "system", "content": "You are a helpful assistant talking in JSON."}, {"role": "user", "content": "What are you?"}],
response_format={"type": "json_object"},
)
@@ -95,11 +95,11 @@ def json_call() -> str:
additional_description="Provide a concise, few sentence argument on why sparkling wine is good for you.",
)
print(result)
-p.record_feedback(
-FeedbackRequest(
-trace_id=trace_id,
-score=0.7, # 0.0 (bad) to 1.0 (good)
-)
-)
+# p.record_feedback(
+# FeedbackRequest(
+# trace_id=trace_id,
+# score=0.7, # 0.0 (bad) to 1.0 (good)
+# )
+# )

print(json_call())
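As a usage note, the cookbook's default model is now gpt-4-turbo, and json_call returns the JSON-mode response as a string. A minimal sketch assuming call_llm and json_call are available as defined above; the override and the parsing step are illustrative, not part of this commit.

import json

# The new gpt-4-turbo default can still be overridden, e.g. with the dated snapshot added in constants.py:
answer = call_llm([{"role": "user", "content": "Name one sparkling wine region."}], model="gpt-4-turbo-2024-04-09")

# JSON mode returns a JSON-formatted string, so parse it before use:
parsed = json.loads(json_call())
print(answer, parsed)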
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "parea-ai"
packages = [{ include = "parea" }]
version = "0.2.127"
version = "0.2.128"
description = "Parea python sdk"
readme = "README.md"
authors = ["joel-parea-ai <[email protected]>"]
