Commit: Update Docs
Maximilian-Winter committed May 5, 2024
1 parent 4c85f0b commit 462b51d
Showing 12 changed files with 706 additions and 59 deletions.
16 changes: 16 additions & 0 deletions docs/agent_chains.md
@@ -0,0 +1,16 @@
## Agent Chains

### Agent Chain Element (AgentChainElement)
A single element of an agent chain.

::: llama_cpp_agent.chain.AgentChainElement

### Sequential Chain (AgentChain)
A chain whose elements are invoked sequentially, each element receiving the outputs of earlier elements.

::: llama_cpp_agent.chain.AgentChain
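
A minimal sketch of a two-step sequential chain, assuming the `AgentChainElement(output_identifier, system_prompt, prompt)` constructor, `AgentChain(agent, elements)`, and `run_chain(additional_fields=...)` as used in the repository's chain examples; the endpoint URL, prompts, and field names here are illustrative placeholders, not a definitive usage guide:

```python
from llama_cpp_agent.llm_agent import LlamaCppAgent
from llama_cpp_agent.chain import AgentChainElement, AgentChain
from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings

main_model = LlamaCppEndpointSettings("http://127.0.0.1:8080/completion")
agent = LlamaCppAgent(main_model, predefined_messages_formatter_type=MessagesFormatterType.CHATML)

# Each element stores its output under its output_identifier, so later
# elements can reference it as a placeholder in their prompts.
product_description = AgentChainElement(
    output_identifier="out_0",
    system_prompt="You are an advanced product description writer.",
    prompt="Write a product description for the following product: {product_name}",
)
product_tweet = AgentChainElement(
    output_identifier="out_1",
    system_prompt="You are a social media marketing expert.",
    prompt="Write a tweet based on the following product description: {out_0}",
)

chain = AgentChain(agent, [product_description, product_tweet])
# Placeholders like {product_name} are filled in from additional_fields.
chain.run_chain(additional_fields={"product_name": "solar-powered backpack"})
```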

### Map Chain (MapChain)
Maps a chain over a list of items, then combines the per-item results using a second chain.

::: llama_cpp_agent.chain.MapChain
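
A sketch of a map chain under similar assumptions: `MapChain(agent, map_elements, combine_elements)` with `run_map_chain(items_to_map=...)`, where the current item is assumed to be exposed as `{item}` and the collected per-item outputs as `{map_output}`; treat these placeholder names as assumptions rather than a definitive API reference:

```python
from llama_cpp_agent.llm_agent import LlamaCppAgent
from llama_cpp_agent.chain import AgentChainElement, MapChain
from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings

main_model = LlamaCppEndpointSettings("http://127.0.0.1:8080/completion")
agent = LlamaCppAgent(main_model, predefined_messages_formatter_type=MessagesFormatterType.CHATML)

# Elements applied to every item of the mapped list; '{item}' is assumed
# to hold the current list entry.
summarize = AgentChainElement(
    output_identifier="out_0",
    system_prompt="You are an advanced summarization agent.",
    prompt="Summarize the following text: {item}",
)

# Elements that combine the mapped results; '{map_output}' is assumed
# to hold all per-item outputs.
combine = AgentChainElement(
    output_identifier="out_0",
    system_prompt="You are an advanced summarization agent.",
    prompt="Combine the following summaries into a single coherent summary: {map_output}",
)

map_chain = MapChain(agent, [summarize], [combine])
map_chain.run_map_chain(items_to_map=["First article text...", "Second article text..."])
```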
6 changes: 3 additions & 3 deletions docs/knowledge-graph-example.md
@@ -1,4 +1,4 @@
### Knowledge Graph Creation Example
This example, based on an example from the Instructor library for OpenAI,
demonstrates how to create a knowledge graph using the llama-cpp-agent framework.
```python
@@ -53,7 +53,7 @@ gbnf_grammar, documentation = generate_gbnf_grammar_and_documentation([Knowledge


llama_cpp_agent = LlamaCppAgent(main_model, debug_output=True,
                                system_prompt="You are an advanced AI assistant responding in JSON format.\n\nAvailable JSON response models:\n\n" + documentation)


from graphviz import Digraph
@@ -77,7 +77,7 @@ def visualize_knowledge_graph(kg: KnowledgeGraph):
def generate_graph(user_input: str) -> KnowledgeGraph:
    prompt = f'''Help me understand the following by describing it as a detailed knowledge graph: {user_input}'''.strip()
    response = llama_cpp_agent.get_chat_response(message=prompt, temperature=0.65, mirostat_mode=0, mirostat_tau=3.0,
                                                 mirostat_eta=0.1, grammar=gbnf_grammar)
    knowledge_graph = json.loads(response)
    cls = KnowledgeGraph
    knowledge_graph = cls(**knowledge_graph)
```
104 changes: 104 additions & 0 deletions docs/llama_index_tool_use.md
@@ -0,0 +1,104 @@
### llama-index Tools Example
```python
# Example that uses the FunctionCallingAgent class to use llama_index tools and query engines.

# Import necessary classes of llama-cpp-agent
from llama_cpp_agent.function_calling import LlamaCppFunctionTool
from llama_cpp_agent.function_calling_agent import FunctionCallingAgent
from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings, LlamaCppGenerationSettings

# Code taken from llama-index example to create a query engine for asking questions
# https://docs.llamaindex.ai/en/stable/examples/agent/react_agent_with_query_engine/

# Import necessary classes of llama-index
from llama_index.core import (
    SimpleDirectoryReader,
    VectorStoreIndex,
    StorageContext,
    load_index_from_storage, Settings,
)
from llama_index.core.tools import QueryEngineTool, ToolMetadata

# Set the default LLM of llama-index to None; llama-index will throw an error otherwise!
Settings.llm = None


# load data
lyft_docs = SimpleDirectoryReader(
    input_files=["./data/10k/lyft_2021.pdf"]
).load_data()
uber_docs = SimpleDirectoryReader(
    input_files=["./data/10k/uber_2021.pdf"]
).load_data()

# build index
lyft_index = VectorStoreIndex.from_documents(lyft_docs, embed_model="local")
uber_index = VectorStoreIndex.from_documents(uber_docs, embed_model="local")

# Create the query engines for lyft and uber.

lyft_engine = lyft_index.as_query_engine(similarity_top_k=3)
uber_engine = uber_index.as_query_engine(similarity_top_k=3)

# Create a list of query engine tools.
query_engine_tools = [
    QueryEngineTool(
        query_engine=lyft_engine,
        metadata=ToolMetadata(
            name="lyft_10k",
            description=(
                "Provides information about Lyft financials for year 2021. "
                "Use a detailed plain text question as input to the tool."
            ),
        ),
    ),
    QueryEngineTool(
        query_engine=uber_engine,
        metadata=ToolMetadata(
            name="uber_10k",
            description=(
                "Provides information about Uber financials for year 2021. "
                "Use a detailed plain text question as input to the tool."
            ),
        ),
    ),
]

# Initialize the llama-cpp-agent LLM and the generation parameters.
generation_settings = LlamaCppGenerationSettings(temperature=0.45, top_p=1.0, top_k=0, stream=True)
main_model = LlamaCppEndpointSettings("http://localhost:8080/completion")

# Create LlamaCppFunctionTool instances from the llama-index query engine tools by passing
# them to the from_llama_index_tool function of the LlamaCppFunctionTool class.
lyft_query_engine_tool = LlamaCppFunctionTool.from_llama_index_tool(query_engine_tools[0])

uber_query_engine_tool = LlamaCppFunctionTool.from_llama_index_tool(query_engine_tools[1])


function_call_agent = FunctionCallingAgent(
    main_model,
    llama_generation_settings=generation_settings,
    # Pass the LlamaCppFunctionTool instances as a list to the agent.
    llama_cpp_function_tools=[lyft_query_engine_tool, uber_query_engine_tool],
    allow_parallel_function_calling=True,
    messages_formatter_type=MessagesFormatterType.CHATML,
    debug_output=True)

user_input = "What was Lyft's revenue growth in 2021?"
function_call_agent.generate_response(user_input)

```
Example Output:
```text
[
  {
    "thoughts_and_reasoning": "The user has asked for Lyft's revenue growth in the year 2021. Based on the context information provided by the 'lyft_10k' function call, we can determine that Lyft's revenue increased by 36% in 2021 compared to the previous year.",
    "function": "send_message",
    "parameters": {
      "content": "Lyft's revenue grew by 36% in the year 2021."
    }
  }
]
Lyft's revenue grew by 36% in the year 2021.
```
79 changes: 29 additions & 50 deletions docs/manual-function-calling.md
@@ -1,72 +1,51 @@
### Manual Function Calling Example
This example shows how to do function calling with pydantic models.
You can also automatically convert Python functions with type hints to pydantic models using the function
`create_dynamic_model_from_function` from `llama_cpp_agent.gbnf_grammar_generator.gbnf_grammar_from_pydantic_models`.

```python
import json
import math
from typing import Union

from llama_cpp_agent.llm_agent import LlamaCppAgent
from llama_cpp_agent.gbnf_grammar_generator.gbnf_grammar_from_pydantic_models import \
    generate_gbnf_grammar_and_documentation, create_dynamic_model_from_function
from llama_cpp_agent.providers.llama_cpp_endpoint_provider import LlamaCppEndpointSettings


def calculate_a_to_the_power_b(a: Union[int, float], b: Union[int, float]):
    """
    Calculates a to the power of b

    Args:
        a: number
        b: exponent
    """
    print(f"Result: {math.pow(a, b)}")


# Convert the Python function into a pydantic model automatically.
DynamicSampleModel = create_dynamic_model_from_function(calculate_a_to_the_power_b)

# Generate the GBNF grammar and the documentation for the model.
grammar, documentation = generate_gbnf_grammar_and_documentation([DynamicSampleModel], outer_object_name="function",
                                                                 outer_object_content="params")

main_model = LlamaCppEndpointSettings(
    completions_endpoint_url="http://127.0.0.1:8080/completion"
)

llama_cpp_agent = LlamaCppAgent(main_model, debug_output=True,
                                system_prompt="You are an advanced AI, tasked to generate JSON objects for function calling.\n\n" + documentation)

response = llama_cpp_agent.get_chat_response("a= 5, b = 42", temperature=0.15, grammar=grammar)

# Parse the JSON response and execute the call by instantiating the
# dynamic model and invoking its run() method.
function_call = json.loads(response)

instance = DynamicSampleModel(**function_call['params'])
instance.run()
```
Example output
```text
Result: 2.2737367544323207e+29
```
54 changes: 54 additions & 0 deletions docs/manual_function_calling_with_python_function.md
@@ -0,0 +1,54 @@
### Manual Function Calling with Python Function Example
This example shows how to do function calling using actual Python functions.

```python
from llama_cpp import Llama
from typing import Union
import math

from llama_cpp_agent.llm_agent import LlamaCppAgent

from llama_cpp_agent.messages_formatter import MessagesFormatterType
from llama_cpp_agent.function_calling import LlamaCppFunctionTool
from llama_cpp_agent.gbnf_grammar_generator.gbnf_grammar_from_pydantic_models import create_dynamic_model_from_function


def calculate_a_to_the_power_b(a: Union[int, float], b: Union[int, float]):
    """
    Calculates 'a' to the power 'b' and returns the result
    """
    return f"Result: {math.pow(a, b)}"


function_tools = [LlamaCppFunctionTool(calculate_a_to_the_power_b)]

function_tool_registry = LlamaCppAgent.get_function_tool_registry(function_tools)

main_model = Llama(
    "../../gguf-models/openhermes-2.5-mistral-7b-16k.Q8_0.gguf",
    n_gpu_layers=49,
    offload_kqv=True,
    f16_kv=True,
    use_mlock=False,
    embedding=False,
    n_threads=8,
    n_batch=1024,
    n_ctx=8192,
    last_n_tokens_size=1024,
    verbose=True,
    seed=42,
)

llama_cpp_agent = LlamaCppAgent(main_model, debug_output=True,
                                system_prompt="You are an advanced AI, tasked to assist the user by calling functions in JSON format. The following are the available functions and their parameters and types:\n\n" + function_tool_registry.get_documentation(),
                                predefined_messages_formatter_type=MessagesFormatterType.CHATML)
user_input = "Calculate 5 to power 42"

print(llama_cpp_agent.get_chat_response(user_input, temperature=0.45, function_tool_registry=function_tool_registry))

```
Example output
```text
{ "function": "calculate-a-to-the-power-b","function_parameters": { "a": 5 , "b": 42 }}
Result: 2.2737367544323207e+29
```