Merge pull request #27 from parea-ai/PAI-198-allow-capturing-of-feedback-to-a-trace-via-sdk

PAI-198-allow-capturing-of-feedback-to-a-trace-via-sdk
jalexanderII committed Aug 13, 2023
2 parents db69e77 + ee3d91a commit cb97066
Showing 4 changed files with 35 additions and 4 deletions.
.env.sample: 3 changes (2 additions & 1 deletion)
@@ -1 +1,2 @@
-API_KEY=<API_KEY>
+API_KEY=<key>
+DEPLOYMENT_ID=<id>
parea/client.py: 17 changes (16 additions & 1 deletion)
@@ -1,10 +1,13 @@
+import uuid
+
from attrs import asdict, define, field

from parea.api_client import HTTPClient
-from parea.schemas.models import Completion, CompletionResponse, UseDeployedPrompt, UseDeployedPromptResponse
+from parea.schemas.models import Completion, CompletionResponse, FeedbackRequest, UseDeployedPrompt, UseDeployedPromptResponse

COMPLETION_ENDPOINT = "/completion"
DEPLOYED_PROMPT_ENDPOINT = "/deployed-prompt"
+RECORD_FEEDBACK_ENDPOINT = "/feedback"


@define
@@ -46,3 +49,15 @@ async def aget_prompt(self, data: UseDeployedPrompt) -> UseDeployedPromptResponse:
            data=asdict(data),
        )
        return UseDeployedPromptResponse(**r.json())
+
+    async def record_feedback(self, data: FeedbackRequest) -> None:
+        await self._client.request_async(
+            "POST",
+            RECORD_FEEDBACK_ENDPOINT,
+            data=asdict(data),
+        )
+
+
+def gen_trace_id() -> str:
+    """Generate a unique trace id for each chain of requests"""
+    return str(uuid.uuid4())
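Taken together, the client changes support a flow like the sketch below: generate a trace id, attach it to a Completion, then record feedback against that trace. This is only a sketch. The client class name (Parea), its constructor, and the async completion method are assumptions; the class definition is collapsed out of this diff and only the new helpers appear above.

import asyncio
import os

from parea.client import Parea, gen_trace_id  # "Parea" is assumed; the class body is collapsed in this diff
from parea.schemas.models import Completion, FeedbackRequest


async def main() -> None:
    # Constructor signature is assumed; only the request methods appear in this diff.
    p = Parea(api_key=os.getenv("API_KEY"))

    # New in this commit: generate a trace id and attach it to the completion call.
    trace_id = gen_trace_id()
    await p.acompletion(  # assumed async completion method on the client
        data=Completion(
            trace_id=trace_id,
            deployment_id=os.getenv("DEPLOYMENT_ID"),
            llm_inputs={"question": "What does Parea do?"},
        )
    )

    # Then record feedback against that trace; the score must lie in [0, 1].
    await p.record_feedback(FeedbackRequest(trace_id=trace_id, score=1.0, name="thumbs_up"))


if __name__ == "__main__":
    asyncio.run(main())

Since CompletionResponse now carries an inference_id and an optional trace_id, feedback can alternatively be tied to a single inference by passing inference_id instead of trace_id in the FeedbackRequest.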
parea/schemas/models.py: 17 changes (16 additions & 1 deletion)
@@ -2,7 +2,7 @@

from enum import Enum

-from attrs import define
+from attrs import define, field, validators


class Role(str, Enum):
@@ -41,12 +41,16 @@ class LLMInputs:

@define
class Completion:
    trace_id: Optional[str] = None
    trace_name: Optional[str] = None
    llm_inputs: Optional[dict[str, Any]] = None
    llm_configuration: LLMInputs = LLMInputs()
    end_user_identifier: Optional[str] = None
    deployment_id: Optional[str] = None
    name: Optional[str] = None
    metadata: Optional[dict] = None
    tags: Optional[list[str]] = None
    target: Optional[str] = None
    cache: bool = True
    log_omit_inputs: bool = False
    log_omit_outputs: bool = False
@@ -55,6 +59,7 @@ class Completion:

@define
class CompletionResponse:
+    inference_id: str
    content: str
    latency: float
    input_tokens: int
@@ -67,6 +72,7 @@ class CompletionResponse:
    status: str
    start_timestamp: str
    end_timestamp: str
+    trace_id: Optional[str] = None
    error: Optional[str] = None


@@ -93,3 +99,12 @@ class UseDeployedPromptResponse:
    model: Optional[str] = None
    provider: Optional[str] = None
    model_params: Optional[dict[str, Any]] = None
+
+
+@define
+class FeedbackRequest:
+    score: float = field(validator=[validators.ge(0), validators.le(1)])
+    trace_id: Optional[str] = None
+    inference_id: Optional[str] = None
+    name: Optional[str] = None
+    target: Optional[str] = None
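The range check on score happens client-side, before any request is sent. A minimal sketch of that behavior, assuming attrs raises its usual ValueError when a ge/le validator fails:

from parea.schemas.models import FeedbackRequest

# Accepted: score lies inside the closed interval [0, 1].
fb = FeedbackRequest(score=0.85, trace_id="00000000-0000-0000-0000-000000000000", name="relevance")

# Rejected locally: attrs' ge/le validators raise ValueError for out-of-range values.
try:
    FeedbackRequest(score=1.5)
except ValueError as err:
    print(f"invalid feedback score: {err}")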
pyproject.toml: 2 changes (1 addition & 1 deletion)
@@ -6,7 +6,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "parea-ai"
packages = [{ include = "parea" }]
version = "0.1.2"
version = "0.1.3"
description = "Parea python sdk"
readme = "README.md"
authors = ["joel-parea-ai <[email protected]>"]
