Merge pull request #899 from parea-ai/feat-anthropic-instructor
feat: anthropic instructor integration
joschkabraun authored May 24, 2024
2 parents ff205ab + 007ced7 commit bb8eeed
Showing 3 changed files with 17 additions and 9 deletions.
17 changes: 10 additions & 7 deletions parea/client.py
@@ -93,20 +93,23 @@ def wrap_openai_client(self, client: "OpenAI", integration: Optional[str] = None
         BetaWrappers(client).init()
 
         if integration:
-            self._client.add_integration(integration)
-
-            if integration == "instructor":
-                from parea.utils.trace_integrations.instructor import instrument_instructor_validation_errors
-
-                instrument_instructor_validation_errors()
+            self._add_integration(integration)
 
     def wrap_anthropic_client(self, client: "Anthropic", integration: Optional[str] = None) -> None:
         from parea.wrapper.anthropic.anthropic import AnthropicWrapper
 
         AnthropicWrapper().init(log=logger_all_possible, cache=self.cache, client=client)
 
         if integration:
-            self._client.add_integration(integration)
+            self._add_integration(integration)
+
+    def _add_integration(self, integration: str) -> None:
+        self._client.add_integration(integration)
+
+        if integration == "instructor":
+            from parea.utils.trace_integrations.instructor import instrument_instructor_validation_errors
+
+            instrument_instructor_validation_errors()
 
     def auto_trace_openai_clients(self, integration: Optional[str]) -> None:
         import openai
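With this refactor, both wrap_openai_client and wrap_anthropic_client route integration setup through the shared _add_integration helper, so the instructor instrumentation now also applies to wrapped Anthropic clients. A minimal usage sketch (the Parea constructor arguments, the PAREA_API_KEY variable, and the instructor.from_anthropic call are assumptions based on the public SDKs, not part of this diff):

    import os

    import anthropic
    import instructor

    from parea import Parea

    client = anthropic.Anthropic()

    p = Parea(api_key=os.environ["PAREA_API_KEY"])  # assumed constructor / env var name
    p.wrap_anthropic_client(client, "instructor")   # registers the "instructor" integration via _add_integration

    client = instructor.from_anthropic(client)      # patch the wrapped client with instructor

Centralizing the instructor check in _add_integration avoids duplicating the instrumentation logic in each wrap_* method.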
7 changes: 6 additions & 1 deletion parea/wrapper/wrapper.py
@@ -87,7 +87,12 @@ def _init_trace(self, kwargs) -> Tuple[str, datetime, contextvars.Token]:
         if template_inputs := kwargs.pop("template_inputs", None):
             for m in kwargs.get("messages", []):
                 if isinstance(m, dict) and "content" in m:
-                    m["content"] = safe_format_template_to_prompt(m["content"], **template_inputs)
+                    if isinstance(m["content"], str):
+                        m["content"] = safe_format_template_to_prompt(m["content"], **template_inputs)
+                    elif isinstance(m["content"], list):
+                        for i, item in enumerate(m["content"]):
+                            if isinstance(item, dict) and "text" in item:
+                                m["content"][i]["text"] = safe_format_template_to_prompt(item["text"], **template_inputs)
 
         depth = len(new_trace_context) - 1
         root_trace_id = new_trace_context[0]
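The new branch handles Anthropic-style messages whose "content" is a list of content blocks rather than a single string, formatting the "text" field of each block. A standalone sketch of the same logic, using a simple stand-in for safe_format_template_to_prompt (the {{name}} placeholder syntax is an assumption for illustration):

    def format_template(template: str, **inputs) -> str:
        # stand-in for parea's safe_format_template_to_prompt; replaces {{key}} with its value
        for key, value in inputs.items():
            template = template.replace("{{" + key + "}}", str(value))
        return template

    messages = [
        {"role": "user", "content": "Summarize {{topic}}"},  # plain string content
        {"role": "user", "content": [{"type": "text", "text": "Focus on {{topic}}"}]},  # list of content blocks
    ]
    template_inputs = {"topic": "tracing"}

    for m in messages:
        if isinstance(m, dict) and "content" in m:
            if isinstance(m["content"], str):
                m["content"] = format_template(m["content"], **template_inputs)
            elif isinstance(m["content"], list):
                for i, item in enumerate(m["content"]):
                    if isinstance(item, dict) and "text" in item:
                        m["content"][i]["text"] = format_template(item["text"], **template_inputs)

    # messages[0]["content"]            -> "Summarize tracing"
    # messages[1]["content"][0]["text"] -> "Focus on tracing"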
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.poetry]
 name = "parea-ai"
 packages = [{ include = "parea" }]
-version = "0.2.159"
+version = "0.2.160"
 description = "Parea python sdk"
 readme = "README.md"
 authors = ["joel-parea-ai <[email protected]>"]
