Skip to content

Commit

Permalink
Merge pull request #307 from Mirascope/feature/otel-llm-updates
Browse files Browse the repository at this point in the history
feat: otel llm updates
  • Loading branch information
brenkao committed Jun 7, 2024
2 parents 1275904 + 3d5d241 commit 9dc7db1
Show file tree
Hide file tree
Showing 21 changed files with 704 additions and 109 deletions.
58 changes: 58 additions & 0 deletions mirascope/anthropic/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
ContentBlockStartEvent,
Message,
MessageParam,
MessageStartEvent,
MessageStreamEvent,
TextBlock,
TextDelta,
Expand Down Expand Up @@ -153,6 +154,21 @@ def content(self) -> str:
block = self.response.content[0]
return block.text if block.type == "text" else ""

@property
def model(self) -> str:
    """Name of the model that produced this Anthropic response."""
    response_model = self.response.model
    return response_model

@property
def id(self) -> str:
    """Unique identifier assigned to this Anthropic response."""
    response_id = self.response.id
    return response_id

@property
def finish_reasons(self) -> Optional[list[str]]:
    """Returns the finish reasons of the response.

    Returns:
        A single-element list with the stringified `stop_reason`, or None
        when the response has no stop reason. The base-class contract
        requires None (not `["None"]`, which `str(None)` would produce)
        when there is no finish reason.
    """
    stop_reason = self.response.stop_reason
    if stop_reason is None:
        return None
    return [str(stop_reason)]

@property
def usage(self) -> Usage:
"""Returns the usage of the message."""
Expand Down Expand Up @@ -241,3 +257,45 @@ def content(self) -> str:
self.chunk.delta.text if isinstance(self.chunk.delta, TextDelta) else ""
)
return ""

@property
def model(self) -> Optional[str]:
    """Model name from a MessageStartEvent chunk; None for other event types."""
    event = self.chunk
    if not isinstance(event, MessageStartEvent):
        return None
    return event.message.model

@property
def id(self) -> Optional[str]:
    """Message id from a MessageStartEvent chunk; None for other event types."""
    event = self.chunk
    if not isinstance(event, MessageStartEvent):
        return None
    return event.message.id

@property
def finish_reasons(self) -> Optional[list[str]]:
    """Returns the finish reasons of the response.

    Returns None for non-start events, and also when the start event has no
    stop reason yet (`stop_reason` is typically None on a MessageStartEvent;
    the previous `[str(...)]` would yield the misleading `["None"]`, which
    violates the base-class contract of returning None in that case).
    """
    if not isinstance(self.chunk, MessageStartEvent):
        return None
    stop_reason = self.chunk.message.stop_reason
    if stop_reason is None:
        return None
    return [str(stop_reason)]

@property
def usage(self) -> Optional[Usage]:
    """Usage block from a MessageStartEvent chunk; None for other event types."""
    event = self.chunk
    if not isinstance(event, MessageStartEvent):
        return None
    return event.message.usage

@property
def input_tokens(self) -> Optional[int]:
    """Number of input tokens, or None when no usage is available."""
    usage = self.usage
    return usage.input_tokens if usage else None

@property
def output_tokens(self) -> Optional[int]:
    """Number of output tokens, or None when no usage is available."""
    usage = self.usage
    return usage.output_tokens if usage else None
69 changes: 69 additions & 0 deletions mirascope/base/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,27 @@ def content(self) -> str:
"""
... # pragma: no cover

@property
@abstractmethod
def finish_reasons(self) -> Optional[list[str]]:
    """Should return the finish reasons of the response.

    If there is no finish reason, this property must return None.
    """
    ... # pragma: no cover

@property
@abstractmethod
def model(self) -> Optional[str]:
    """Should return the name of the response model.

    May be None when the provider does not report a model name.
    """
    ... # pragma: no cover

@property
@abstractmethod
def id(self) -> Optional[str]:
    """Should return the id of the response.

    May be None when the provider does not report an id.
    """
    ... # pragma: no cover

@property
@abstractmethod
def usage(self) -> Any:
Expand Down Expand Up @@ -223,3 +244,51 @@ def content(self) -> str:
the empty string.
"""
... # pragma: no cover

@property
@abstractmethod
def model(self) -> Optional[str]:
    """Should return the name of the response model.

    May be None for chunks that do not carry model information.
    """
    ... # pragma: no cover

@property
@abstractmethod
def id(self) -> Optional[str]:
    """Should return the id of the response.

    May be None for chunks that do not carry an id.
    """
    ... # pragma: no cover

@property
@abstractmethod
def finish_reasons(self) -> Optional[list[str]]:
    """Should return the finish reasons of the response.

    If there is no finish reason, this property must return None.
    """
    ... # pragma: no cover

@property
@abstractmethod
def usage(self) -> Any:
    """Should return the usage of the response.

    If there is no usage, this property must return None.
    """
    ... # pragma: no cover

@property
@abstractmethod
def input_tokens(self) -> Optional[Union[int, float]]:
    """Should return the number of input tokens.

    If there is no input token count, this property must return None.
    """
    ... # pragma: no cover

@property
@abstractmethod
def output_tokens(self) -> Optional[Union[int, float]]:
    """Should return the number of output tokens.

    If there is no output token count, this property must return None.
    """
    ... # pragma: no cover
63 changes: 63 additions & 0 deletions mirascope/cohere/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
StreamedChatResponse_SearchQueriesGeneration,
StreamedChatResponse_SearchResults,
StreamedChatResponse_StreamEnd,
StreamedChatResponse_StreamStart,
StreamedChatResponse_ToolCallsGeneration,
)
from cohere.types import (
Expand Down Expand Up @@ -130,6 +131,24 @@ def content(self) -> str:
"""Returns the content of the chat completion for the 0th choice."""
return self.response.text

@property
def model(self) -> Optional[str]:
    """Always None: the Cohere response does not include a model name."""
    return None

@property
def id(self) -> Optional[str]:
    """Generation id of the underlying Cohere response."""
    generation_id = self.response.generation_id
    return generation_id

@property
def finish_reasons(self) -> Optional[list[str]]:
    """Returns the finish reasons of the response.

    Returns:
        A single-element list with the stringified `finish_reason`, or None
        when the response carries no finish reason. The base-class contract
        requires None (not `["None"]`, which `str(None)` would produce)
        when there is no finish reason.
    """
    finish_reason = self.response.finish_reason
    if finish_reason is None:
        return None
    return [str(finish_reason)]

@property
def search_queries(self) -> Optional[list[ChatSearchQuery]]:
"""Returns the search queries for the 0th choice message."""
Expand Down Expand Up @@ -307,6 +326,28 @@ def citations(self) -> Optional[list[ChatCitation]]:
return self.chunk.citations
return None

@property
def model(self) -> Optional[str]:
    """Always None: Cohere stream chunks do not include a model name."""
    return None

@property
def id(self) -> Optional[str]:
    """Generation id from a stream-start chunk; None for other event types."""
    event = self.chunk
    if not isinstance(event, StreamedChatResponse_StreamStart):
        return None
    return event.generation_id

@property
def finish_reasons(self) -> Optional[list[str]]:
    """Finish reason from a stream-end chunk as a one-element list; None otherwise."""
    event = self.chunk
    if not isinstance(event, StreamedChatResponse_StreamEnd):
        return None
    return [str(event.finish_reason)]

@property
def response(self) -> Optional[NonStreamedChatResponse]:
"""Returns the full response for the stream-end event type else None."""
Expand All @@ -321,6 +362,28 @@ def tool_calls(self) -> Optional[list[ToolCall]]:
return self.chunk.tool_calls
return None

@property
def usage(self) -> Optional[ApiMetaBilledUnits]:
    """Billed units from a stream-end chunk's response metadata, if present."""
    event = self.chunk
    if not isinstance(event, StreamedChatResponse_StreamEnd):
        return None
    meta = event.response.meta
    return meta.billed_units if meta else None

@property
def input_tokens(self) -> Optional[float]:
    """Number of billed input tokens, or None when no usage is available."""
    billed = self.usage
    return billed.input_tokens if billed else None

@property
def output_tokens(self) -> Optional[float]:
    """Number of billed output tokens, or None when no usage is available."""
    billed = self.usage
    return billed.output_tokens if billed else None


class CohereEmbeddingResponse(BaseEmbeddingResponse[SkipValidation[EmbedResponse]]):
"""A convenience wrapper around the Cohere `EmbedResponse` response."""
Expand Down
84 changes: 84 additions & 0 deletions mirascope/gemini/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,39 @@ def content(self) -> str:
"""Returns the contained string content for the 0th choice."""
return self.response.candidates[0].content.parts[0].text

@property
def id(self) -> Optional[str]:
    """Always None: google.generativeai responses do not include an id."""
    return None

@property
def finish_reasons(self) -> list[str]:
    """Human-readable finish reason names, one per response candidate.

    Maps the integer `finish_reason` of each candidate onto the names of
    google.generativeai's FinishReason values, in declaration order.
    """
    reason_names = (
        "FINISH_REASON_UNSPECIFIED",
        "STOP",
        "MAX_TOKENS",
        "SAFETY",
        "RECITATION",
        "OTHER",
    )
    names = []
    for candidate in self.response.candidates:
        names.append(reason_names[candidate.finish_reason])
    return names

@property
def model(self) -> None:
    """Always None: google.generativeai responses do not include a model name."""
    return None

@property
def usage(self) -> None:
"""Returns the usage of the chat completion.
Expand Down Expand Up @@ -192,3 +225,54 @@ class Math(GeminiCall):
def content(self) -> str:
"""Returns the chunk content for the 0th choice."""
return self.chunk.candidates[0].content.parts[0].text

@property
def id(self) -> Optional[str]:
    """Always None: google.generativeai chunks do not include an id."""
    return None

@property
def finish_reasons(self) -> list[str]:
    """Human-readable finish reason names, one per candidate in this chunk.

    Maps the integer `finish_reason` of each candidate onto the names of
    google.generativeai's FinishReason values, in declaration order.
    """
    reason_names = (
        "FINISH_REASON_UNSPECIFIED",
        "STOP",
        "MAX_TOKENS",
        "SAFETY",
        "RECITATION",
        "OTHER",
    )
    names = []
    for candidate in self.chunk.candidates:
        names.append(reason_names[candidate.finish_reason])
    return names

@property
def model(self) -> None:
    """Always None: google.generativeai chunks do not include a model name."""
    return None

@property
def usage(self) -> None:
    """Always None: google.generativeai provides no Usage metadata."""
    return None

@property
def input_tokens(self) -> None:
    """Always None: google.generativeai does not report input token counts."""
    return None

@property
def output_tokens(self) -> None:
    """Always None: google.generativeai does not report output token counts."""
    return None
Loading

0 comments on commit 9dc7db1

Please sign in to comment.