Merge pull request #308 from Mirascope/better-message-insertion
Convenience for generating the assistant message parameter from the response of a call
willbakst authored Jun 7, 2024
2 parents c91ef8a + b13ab8e commit 1275904
Showing 14 changed files with 77 additions and 3 deletions.
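In short: every provider's call response now exposes a `message_param` property that returns the assistant's reply as that provider's native message parameter type, so it can be appended to chat history verbatim instead of being rebuilt by hand. A minimal before/after sketch, using the `Librarian` example updated in the first file below:

```python
# Before: a hand-rolled dict that only carries text content.
librarian.history.append({"role": "assistant", "content": response.content})

# After: the response emits its own provider-native message parameter
# (including tool calls, where the provider supports them).
librarian.history.append(response.message_param)
```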
2 changes: 1 addition & 1 deletion examples/chat_history_librarian.py
@@ -27,7 +27,7 @@ class Librarian(OpenAICall):
     librarian.question = input("(User): ")
     response = librarian.call()
     librarian.history.append({"role": "user", "content": librarian.question})
-    librarian.history.append({"role": "assistant", "content": response.content})
+    librarian.history.append(response.message_param)
     print(f"(Assistant): {response.content}")
 
 # > (User): What fantasy book should I read?
6 changes: 6 additions & 0 deletions mirascope/anthropic/types.py
@@ -8,6 +8,7 @@
     ContentBlockDeltaEvent,
     ContentBlockStartEvent,
     Message,
+    MessageParam,
     MessageStreamEvent,
     TextBlock,
     TextDelta,
@@ -92,6 +93,11 @@ class BookRecommender(AnthropicCall):
 
     response_format: Optional[Literal["json"]] = None
 
+    @property
+    def message_param(self) -> MessageParam:
+        """Returns the assistant's response as a message parameter."""
+        return self.response.model_dump(include={"content", "role"})  # type: ignore
+
     @property
     def tools(self) -> Optional[list[AnthropicTool]]:
         """Returns the tools for the 0th choice message."""
5 changes: 5 additions & 0 deletions mirascope/base/types.py
@@ -138,6 +138,11 @@ class BaseCallResponse(BaseModel, Generic[ResponseT, BaseToolT], ABC):
 
     model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
 
+    @property
+    def message_param(self) -> Any:
+        """Returns the assistant's response as a message parameter."""
+        ...  # pragma: no cover
+
     @property
     @abstractmethod
     def tools(self) -> Optional[list[BaseToolT]]:
9 changes: 9 additions & 0 deletions mirascope/cohere/types.py
@@ -116,6 +116,15 @@ class BookRecommender(CohereCall):
     # We need to skip validation since it's a pydantic_v1 model and breaks validation.
     response: SkipValidation[NonStreamedChatResponse]
 
+    @property
+    def message_param(self) -> ChatMessage:
+        """Returns the assistant's response as a message parameter."""
+        return ChatMessage(
+            message=self.response.text,
+            tool_calls=self.response.tool_calls,
+            role="assistant",  # type: ignore
+        )
+
     @property
     def content(self) -> str:
         """Returns the content of the chat completion for the 0th choice."""
6 changes: 6 additions & 0 deletions mirascope/gemini/types.py
@@ -4,6 +4,7 @@
 
 from google.generativeai.types import (  # type: ignore
     AsyncGenerateContentResponse,
+    ContentDict,
     GenerateContentResponse,
 )
 
@@ -75,6 +76,11 @@ class BookRecommender(GeminiPrompt):
     ```
     """
 
+    @property
+    def message_param(self) -> ContentDict:
+        """Returns the model's response as a message parameter."""
+        return {"role": "model", "parts": self.response.parts}
+
     @property
     def tools(self) -> Optional[list[GeminiTool]]:
         """Returns the list of tools for the 0th candidate's 0th content part."""
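Worth noting for Gemini: the assistant role is "model", and history entries are `ContentDict`s with `parts` rather than `content`, which is exactly the shape `message_param` emits. A hedged sketch of a multi-turn `contents` list (the `recommender` object and its `call()` are hypothetical stand-ins for the `BookRecommender` in the docstring above):

```python
# Hypothetical multi-turn history for google.generativeai: Gemini expects
# assistant turns under role="model", not role="assistant".
history = [{"role": "user", "parts": ["Recommend a fantasy book."]}]
response = recommender.call()  # hypothetical Gemini call, as in the docstring example
history.append(response.message_param)  # {"role": "model", "parts": [...]}
history.append({"role": "user", "parts": ["Why that one?"]})
```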
7 changes: 6 additions & 1 deletion mirascope/groq/types.py
@@ -3,7 +3,7 @@
 from typing import Any, Optional, Type, Union
 
 from groq._types import Body, Headers, Query
-from groq.types.chat import ChatCompletion
+from groq.types.chat import ChatCompletion, ChatCompletionAssistantMessageParam
 from groq.types.chat.chat_completion import (
     ChatCompletionMessage,
     Choice,
@@ -104,6 +104,11 @@ class BookRecommender(GroqCall):
 
     response_format: Optional[ResponseFormat] = None
 
+    @property
+    def message_param(self) -> ChatCompletionAssistantMessageParam:
+        """Returns the assistant's response as a message parameter."""
+        return self.message.model_dump(exclude={"function_call"})  # type: ignore
+
     @property
     def choices(self) -> list[Choice]:
         """Returns the array of chat completion choices."""
7 changes: 6 additions & 1 deletion mirascope/mistral/types.py
@@ -15,7 +15,7 @@
 )
 from pydantic import ConfigDict
 
-from ..base import BaseCallParams, BaseCallResponse, BaseCallResponseChunk
+from ..base import BaseCallParams, BaseCallResponse, BaseCallResponseChunk, Message
 from .tools import MistralTool
 
 
@@ -68,6 +68,11 @@ class BookRecommender(MistralCall):
     """
 
+    @property
+    def message_param(self) -> Message:
+        """Returns the assistant's response as a message parameter."""
+        return self.message.model_dump()  # type: ignore
+
     @property
     def choices(self) -> list[ChatCompletionResponseChoice]:
         """Returns the array of chat completion choices."""
6 changes: 6 additions & 0 deletions mirascope/openai/types.py
@@ -7,6 +7,7 @@
 from openai.types import Embedding
 from openai.types.chat import (
     ChatCompletion,
+    ChatCompletionAssistantMessageParam,
     ChatCompletionChunk,
     ChatCompletionMessageToolCall,
     ChatCompletionToolChoiceOptionParam,
@@ -104,6 +105,11 @@ class BookRecommender(OpenAICall):
 
     response_format: Optional[ResponseFormat] = None
 
+    @property
+    def message_param(self) -> ChatCompletionAssistantMessageParam:
+        """Returns the assistant's response as a message parameter."""
+        return self.message.model_dump(exclude={"function_call"})  # type: ignore
+
     @property
     def choices(self) -> list[Choice]:
         """Returns the array of chat completion choices."""
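One detail worth calling out in the OpenAI (and Groq) implementation: the dump excludes the deprecated `function_call` field but keeps `tool_calls`. That is what makes multi-turn tool use work, since OpenAI requires the assistant message with its `tool_calls` to be echoed back before any `role: "tool"` results. A hedged sketch of that flow (the `recommender` call and the tool-result plumbing are illustrative, not part of this diff):

```python
# Illustrative follow-up turn for OpenAI-style tool use.
messages = [{"role": "user", "content": "Recommend a fantasy book."}]
response = recommender.call()  # hypothetical OpenAICall configured with tools
assistant_param = response.message_param
messages.append(assistant_param)  # tool_calls preserved, function_call dropped
for tool_call in assistant_param.get("tool_calls") or []:
    # Each tool call must be answered with a matching role="tool" message.
    messages.append(
        {"role": "tool", "tool_call_id": tool_call["id"], "content": "..."}
    )
```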
4 changes: 4 additions & 0 deletions tests/anthropic/test_types.py
@@ -21,6 +21,10 @@ def test_anthropic_call_response(fixture_anthropic_message: Message):
         response=fixture_anthropic_message, start_time=0, end_time=1
     )
     assert response.content == "test"
+    assert response.message_param == {
+        "content": [{"text": "test", "type": "text"}],
+        "role": "assistant",
+    }
     assert response.tools is None
     assert response.tool is None
     assert response.usage is not None
6 changes: 6 additions & 0 deletions tests/cohere/test_types.py
@@ -4,6 +4,7 @@
 from cohere.types import (
     ChatCitation,
     ChatDocument,
+    ChatMessage,
     ChatSearchQuery,
     ChatSearchResult,
     NonStreamedChatResponse,
@@ -42,6 +43,11 @@ def test_cohere_call_response_properties(
         response=fixture_non_streamed_response, start_time=0, end_time=0, cost=1
     )
 
+    assert isinstance(call_response.message_param, ChatMessage)
+    assert call_response.message_param == ChatMessage(
+        message="Test response", tool_calls=[fixture_tool_call], role="assistant"
+    )  # type: ignore
+
     assert call_response.content == "Test response"
     assert call_response.search_queries == [fixture_chat_search_query]
     assert call_response.search_results == [fixture_chat_search_result]
5 changes: 5 additions & 0 deletions tests/gemini/test_types.py
@@ -3,6 +3,7 @@
 from typing import Type
 
 import pytest
+from google.ai.generativelanguage_v1beta import Part
 from google.generativeai.types import GenerateContentResponse  # type: ignore
 
 from mirascope.gemini.tools import GeminiTool
@@ -19,6 +20,10 @@ def test_gemini_call_response(
         end_time=0,
         cost=None,
     )
+    assert response.message_param == {
+        "role": "model",
+        "parts": [Part(text="Who is the author?")],
+    }
     assert response.content == "Who is the author?"
     assert response.tools is None
     assert response.tool is None
5 changes: 5 additions & 0 deletions tests/groq/test_types.py
@@ -22,6 +22,11 @@ def test_groq_call_response(
         start_time=0,
         end_time=0,
     )
+    assert response.message_param == {
+        "role": "assistant",
+        "content": "test content",
+        "tool_calls": [],
+    }
     assert response.content == "test content"
     assert response.tools is None
     assert response.tool is None
7 changes: 7 additions & 0 deletions tests/mistral/test_types.py
@@ -24,6 +24,13 @@ def test_mistral_call_response(
         start_time=0,
         end_time=0,
     )
+    assert response.message_param == {
+        "role": "assistant",
+        "content": "test content",
+        "name": None,
+        "tool_call_id": None,
+        "tool_calls": None,
+    }
     assert response.content == "test content"
     assert response.tools is None
     assert response.tool is None
5 changes: 5 additions & 0 deletions tests/openai/test_types.py
@@ -28,6 +28,11 @@ def test_openai_call_response(fixture_chat_completion: ChatCompletion):
         response=fixture_chat_completion, start_time=0, end_time=0
     )
     choices = fixture_chat_completion.choices
+    assert response.message_param == {
+        "role": "assistant",
+        "content": choices[0].message.content,
+        "tool_calls": None,
+    }
     assert response.choices == choices
     assert response.choice == choices[0]
     assert response.message == choices[0].message
