
Commit fd7dd9b
chore: reformat
umbertogriffo committed Dec 7, 2024
1 parent 2aeca30 commit fd7dd9b
Showing 5 changed files with 26 additions and 26 deletions.
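
The reformat swaps the `typing` module's `List`, `Tuple`, `Dict`, `Optional`, and `Union` for the built-in generics of PEP 585 (Python 3.9+) and the `X | Y` union syntax of PEP 604 (Python 3.10+). A minimal sketch of the equivalence (the function names are illustrative, not from the repository):

    from typing import Any, Iterator

    # Old spelling (requires the typing-module aliases):
    #   def get_history() -> List[Tuple[str, str]]: ...
    #   def get_config() -> Optional[Dict[str, Any]]: ...
    #   def stream() -> Union[str, Iterator[str]]: ...

    # New spelling, as applied throughout this commit:
    def get_history() -> list[tuple[str, str]]: ...  # List[Tuple[...]] -> list[tuple[...]]
    def get_config() -> dict[str, Any] | None: ...   # Optional[X] -> X | None
    def stream() -> str | Iterator[str]: ...         # Union[X, Y] -> X | Y

The two spellings are equivalent as annotations; the new one simply needs fewer imports.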
6 changes: 3 additions & 3 deletions chatbot/bot/client/lama_cpp_client.py
@@ -1,6 +1,6 @@
 import os
 from pathlib import Path
-from typing import Any, Iterator, Union
+from typing import Any, Iterator
 
 import requests
 from llama_cpp import CreateCompletionResponse, CreateCompletionStreamResponse, Llama
@@ -158,7 +158,7 @@ def stream_answer(self, prompt: str, max_new_tokens: int = 512) -> str:
 
     def start_answer_iterator_streamer(
         self, prompt: str, max_new_tokens: int = 512
-    ) -> Union[CreateCompletionResponse, Iterator[CreateCompletionStreamResponse]]:
+    ) -> CreateCompletionResponse | Iterator[CreateCompletionStreamResponse]:
         """
         Abstract method to start an answer iterator streamer for a given prompt.
@@ -181,7 +181,7 @@ def start_answer_iterator_streamer(
 
     async def async_start_answer_iterator_streamer(
         self, prompt: str, max_new_tokens: int = 512
-    ) -> Union[CreateCompletionResponse, Iterator[CreateCompletionStreamResponse]]:
+    ) -> CreateCompletionResponse | Iterator[CreateCompletionStreamResponse]:
         """
         This abstract method should be implemented to asynchronously start an answer iterator streamer,
         providing a flexible way to generate answers in a streaming fashion based on the given prompt.
10 changes: 5 additions & 5 deletions chatbot/bot/conversation/conversation_retrieval.py
@@ -1,5 +1,5 @@
 from asyncio import get_event_loop
-from typing import Any, List, Tuple
+from typing import Any
 
 from entities.document import Document
 from helpers.log import get_logger
@@ -30,7 +30,7 @@ def __init__(self, llm: LamaCppClient) -> None:
         self.llm = llm
         self.chat_history = []
 
-    def get_chat_history(self) -> List[Tuple[str, str]]:
+    def get_chat_history(self) -> list[tuple[str, str]]:
         """
         Gets the chat history.
@@ -40,7 +40,7 @@ def get_chat_history(self) -> List[Tuple[str, str]]:
         """
         return self.chat_history
 
-    def update_chat_history(self, question: str, answer: str) -> List[Tuple[str, str]]:
+    def update_chat_history(self, question: str, answer: str) -> list[tuple[str, str]]:
         """
         Updates the chat history.
@@ -57,7 +57,7 @@ def update_chat_history(self, question: str, answer: str) -> List[Tuple[str, str]]:
 
         return self.chat_history
 
-    def keep_chat_history_size(self, max_size: int = 2) -> List[Tuple[str, str]]:
+    def keep_chat_history_size(self, max_size: int = 2) -> list[tuple[str, str]]:
         """
         Keeps the list of chat history at the specified maximum size by popping out the oldest elements.
@@ -160,7 +160,7 @@ def answer(self, question: str, max_new_tokens: int = 512) -> Any:
     def context_aware_answer(
         ctx_synthesis_strategy: BaseSynthesisStrategy,
         question: str,
-        retrieved_contents: List[Document],
+        retrieved_contents: list[Document],
         max_new_tokens: int = 512,
     ):
         if isinstance(ctx_synthesis_strategy, AsyncTreeSummarizationStrategy):
20 changes: 10 additions & 10 deletions chatbot/bot/conversation/ctx_strategy.py
@@ -1,6 +1,6 @@
 import asyncio
 from enum import Enum
-from typing import Any, List, Union
+from typing import Any
 
 import nest_asyncio
 from entities.document import Document
@@ -35,7 +35,7 @@ def __init__(self, llm: LamaCppClient) -> None:
         """
         self.llm = llm
 
-    def generate_response(self, retrieved_contents: List[Document], question: str, max_new_tokens: int = 512):
+    def generate_response(self, retrieved_contents: list[Document], question: str, max_new_tokens: int = 512):
         """
         Generate a response using the synthesis strategy.
@@ -61,8 +61,8 @@ def __init__(self, llm: LamaCppClient):
         super().__init__(llm)
 
     def generate_response(
-        self, retrieved_contents: List[Document], question: str, max_new_tokens: int = 512
-    ) -> Union[str, Any]:
+        self, retrieved_contents: list[Document], question: str, max_new_tokens: int = 512
+    ) -> str | Any:
         """
         Generate a response using create and refine strategy.
@@ -126,7 +126,7 @@ def __init__(self, llm: LamaCppClient):
         super().__init__(llm)
 
     def generate_response(
-        self, retrieved_contents: List[Document], question: str, max_new_tokens: int = 512, num_children: int = 2
+        self, retrieved_contents: list[Document], question: str, max_new_tokens: int = 512, num_children: int = 2
     ) -> Any:
         """
         Generate a response using hierarchical summarization strategy.
@@ -170,9 +170,9 @@ def generate_response(
 
     def combine_results(
         self,
-        texts: List[str],
+        texts: list[str],
         question: str,
-        cur_prompt_list: List[str],
+        cur_prompt_list: list[str],
         max_new_tokens: int = 512,
         num_children: int = 2,
     ) -> Any:
@@ -227,7 +227,7 @@ def __init__(self, llm: LamaCppClient):
 
     async def generate_response(
         self,
-        retrieved_contents: List[Document],
+        retrieved_contents: list[Document],
         question: str,
         max_new_tokens: int = 512,
         num_children: int = 2,
@@ -278,9 +278,9 @@ async def generate_response(
 
     async def combine_results(
         self,
-        texts: List[str],
+        texts: list[str],
         question: str,
-        cur_prompt_list: List[str],
+        cur_prompt_list: list[str],
         max_new_tokens: int = 512,
         num_children: int = 2,
     ):
10 changes: 5 additions & 5 deletions chatbot/bot/memory/vector_database/chroma.py
@@ -92,8 +92,8 @@ def add_texts(
         Args:
             texts (Iterable[str]): Texts to add to the vectorstore.
-            metadatas (Optional[List[dict]], optional): Optional list of metadatas.
-            ids (Optional[List[str]], optional): Optional list of IDs.
+            metadatas (list[dict] | None): Optional list of metadatas.
+            ids (list[str] | None): Optional list of IDs.
         Returns:
             List[str]: List of IDs of the added texts.
@@ -257,7 +257,7 @@ def similarity_search(self, query: str, k: int = 4, filter: dict[str, str] | None
         Args:
             query (str): Query text to search for.
             k (int): Number of results to return. Defaults to 4.
-            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
+            filter (dict[str, str]|None): Filter by metadata. Defaults to None.
         Returns:
             List[Document]: List of documents most similar to the query text.
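
A hypothetical usage sketch of the search API documented above (the `vectorstore` instance and the filter value are assumptions, not shown in this diff):

    # Assumes `vectorstore` is an initialized instance of this Chroma wrapper.
    docs = vectorstore.similarity_search(
        query="How does answer streaming work?",
        k=4,  # return the four closest chunks
        filter={"source": "docs/guide.md"},  # metadata filter; pass None to disable
    )
    for doc in docs:
        print(doc)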
@@ -278,8 +278,8 @@ def similarity_search_with_score(
         Args:
             query (str): Query text to search for.
             k (int): Number of results to return. Defaults to 4.
-            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
-            where_document (Optional[Dict[str, str]]): Filter by document content. Defaults to None.
+            filter (dict[str, str]|None): Filter by metadata. Defaults to None.
+            where_document (dict[str, str]|None): Filter by document content. Defaults to None.
             **kwargs (Any): Additional keyword arguments.
         Returns:
6 changes: 3 additions & 3 deletions chatbot/bot/model/model.py
@@ -1,9 +1,9 @@
 from abc import ABC
-from typing import Any, Dict, Optional
+from typing import Any
 
 
 class Model(ABC):
     url: str
     file_name: str
-    config: Dict[str, Any]
-    config_answer: Optional[Dict[str, Any]]
+    config: dict[str, Any]
+    config_answer: dict[str, Any] | None
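
With the modern annotations, a concrete model spec only has to fill in the declared fields; a hypothetical subclass for illustration (name, URL, and config values are invented, not from the repository):

    from abc import ABC
    from typing import Any

    class Model(ABC):
        url: str
        file_name: str
        config: dict[str, Any]
        config_answer: dict[str, Any] | None

    class ExampleGgufModel(Model):  # hypothetical subclass
        url = "https://example.com/models/example.gguf"  # placeholder URL
        file_name = "example.gguf"
        config = {"n_ctx": 4096}  # invented config key
        config_answer = None  # permitted by dict[str, Any] | None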
