Skip to content

Commit

Permalink
Added web search code as part of the framework
Browse files Browse the repository at this point in the history
  • Loading branch information
Maximilian-Winter committed May 26, 2024
1 parent d29b791 commit 4c374e6
Show file tree
Hide file tree
Showing 7 changed files with 50 additions and 40 deletions.
45 changes: 45 additions & 0 deletions examples/03_Tools_And_Function_Calling/web_search_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from llama_cpp_agent import MessagesFormatterType, LlamaCppAgent
from llama_cpp_agent.chat_history.messages import Roles
from llama_cpp_agent.llm_output_settings import LlmStructuredOutputSettings
from llama_cpp_agent.providers import LlamaCppServerProvider
from llama_cpp_agent.tools import WebSearchTool

def send_message_to_user(message: str):
    """Deliver the assistant's reply to the user on standard output.

    Exposed to the model as a callable tool: when the model wants to talk
    to the human rather than run a search, it invokes this function.

    Args:
        message (str): Text to display to the user.
    """
    print(message)


# Connect to a running llama.cpp server that hosts the model.
provider = LlamaCppServerProvider("http://localhost:8080")

# Agent configured for ChatML prompt formatting; tool and structured-output
# documentation is appended to the system prompt so the model knows how to
# emit JSON function calls.
agent = LlamaCppAgent(
    provider,
    debug_output=True,
    system_prompt="You are a helpful assistant. Use additional available information you have access to when giving a response. Always give detailed and long responses. Format your response, well structured in markdown format.",
    predefined_messages_formatter_type=MessagesFormatterType.CHATML,
    add_tools_and_structures_documentation_to_system_prompt=True,
)

# Web search tool driven by the same provider/formatter.
# NOTE(review): 20000 is presumably a character/context budget for fetched
# page content — confirm against WebSearchTool's signature.
search_tool = WebSearchTool(provider, MessagesFormatterType.CHATML, 20000)

settings = provider.get_provider_default_settings()
settings.temperature = 0.65
# settings.top_p = 0.85
# settings.top_k = 60
# settings.tfs_z = 0.95
settings.max_tokens = 2048

# Constrain model output to structured JSON calls of exactly these functions.
output_settings = LlmStructuredOutputSettings.from_functions(
    [search_tool.get_tool(), send_message_to_user]
)

# Chat loop. Feed tool return values back to the model (as tool-role
# messages) until it chooses send_message_to_user, then prompt the human
# again. The single call site replaces three duplicated get_chat_response
# calls in the original.
message = input(">")
role_kwargs = {}
while True:
    result = agent.get_chat_response(
        message,
        prompt_suffix="\n```json\n",
        llm_sampling_settings=settings,
        structured_output_settings=output_settings,
        **role_kwargs,
    )
    if result[0]["function"] == "send_message_to_user":
        # Model answered the user — read the next human message.
        message, role_kwargs = input(">"), {}
    else:
        # A tool ran — hand its return value back as a tool message.
        # Requires `Roles` to be imported (missing in the original: NameError).
        message, role_kwargs = result[0]["return_value"], {"role": Roles.tool}
1 change: 1 addition & 0 deletions src/llama_cpp_agent/tools/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Re-export the web-search API at the `llama_cpp_agent.tools` package level.
# NOTE(review): the original `from web_search import ...` is an absolute
# import and raises ModuleNotFoundError once the package is installed;
# the import must be package-relative.
from .web_search import (
    WebSearchTool,
    WebSearchProvider,
    WebCrawler,
    TrafilaturaWebCrawler,
    DDGWebSearchProvider,
)
4 changes: 4 additions & 0 deletions src/llama_cpp_agent/tools/web_search/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Public surface of the `web_search` subpackage.
# NOTE(review): the original absolute imports (`from tool import ...`, etc.)
# fail with ModuleNotFoundError when installed; they must be relative to
# this package.
from .tool import WebSearchTool
from .web_search_interfaces import WebCrawler, WebSearchProvider
from .default_web_crawlers import TrafilaturaWebCrawler
from .default_web_search_providers import DDGWebSearchProvider
Original file line number Diff line number Diff line change
Expand Up @@ -67,44 +67,4 @@ def get_tool(self):
return self.search_web


def send_message_to_user(message: str):
    """Print a model-generated reply so the human can read it.

    Registered with the agent as a tool; the model calls it whenever it
    wants to address the user directly instead of searching.

    Args:
        message (str): The reply text to show.
    """
    print(message)


# Connect to the llama.cpp server hosting the model.
provider = LlamaCppServerProvider("http://hades.hq.solidrust.net:8084")
#provider = LlamaCppServerProvider("http://localhost:8080")

# ChatML-formatted agent; tool/structured-output docs are added to the
# system prompt so the model can emit JSON function calls.
agent = LlamaCppAgent(
    provider,
    debug_output=True,
    system_prompt="You are a helpful assistant. Use additional available information you have access to when giving a response. Always give detailed and long responses. Format your response, well structured in markdown format.",
    predefined_messages_formatter_type=MessagesFormatterType.CHATML,
    add_tools_and_structures_documentation_to_system_prompt=True,
)

# NOTE(review): 20000 presumably limits fetched page content size — confirm
# against WebSearchTool's constructor.
search_tool = WebSearchTool(provider, MessagesFormatterType.CHATML, 20000)

settings = provider.get_provider_default_settings()
settings.temperature = 0.65
# settings.top_p = 0.85
# settings.top_k = 60
# settings.tfs_z = 0.95
settings.max_tokens = 2048

# Restrict model output to structured calls of these two functions.
output_settings = LlmStructuredOutputSettings.from_functions(
    [search_tool.get_tool(), send_message_to_user]
)

# Single-call-site chat loop: tool results are fed back to the model as
# tool-role messages until it answers the user, then we prompt again.
# Replaces three duplicated get_chat_response calls in the original.
message = input(">")
role_kwargs = {}
while True:
    result = agent.get_chat_response(
        message,
        prompt_suffix="\n```json\n",
        llm_sampling_settings=settings,
        structured_output_settings=output_settings,
        **role_kwargs,
    )
    if result[0]["function"] == "send_message_to_user":
        message, role_kwargs = input(">"), {}
    else:
        # Requires `Roles` to be imported (missing in the original: NameError).
        message, role_kwargs = result[0]["return_value"], {"role": Roles.tool}

0 comments on commit 4c374e6

Please sign in to comment.