
Commit

Fixes issues with virtual game master.
Maximilian-Winter committed Jun 6, 2024
1 parent 79d3ee2 commit d29df44
Showing 3 changed files with 25 additions and 66 deletions.
examples/07_Memory/VirtualGameMaster/main.py (6 changes: 3 additions & 3 deletions)

@@ -20,9 +20,9 @@
 
 settings = provider.get_provider_default_settings()
 settings.n_predict = 1024
-settings.temperature = 0.35
-settings.top_k = 0
-settings.top_p = 1.0
+settings.temperature = 0.65
+settings.top_k = 40
+settings.top_p = 0.85
 
 memory_section = SystemPromptModule("memory",
                                     "The following section shows the count of memories in archival memory and chat history memory and the current content of your core memory:")
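The sampler change above loosens the defaults for the game-master example. For quick reference, an annotated version of the new block (same attribute names as in the diff; `provider` is the provider object already constructed earlier in the example, and the comments are explanatory additions, not part of the commit):

```python
settings = provider.get_provider_default_settings()

settings.n_predict = 1024    # maximum number of tokens generated per response
settings.temperature = 0.65  # raised from 0.35 for more varied game-master prose
settings.top_k = 40          # was 0 (disabled); sample only from the 40 most likely tokens
settings.top_p = 0.85        # was 1.0 (disabled); nucleus sampling over the top 85% of probability mass
```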
examples/07_Memory/VirtualGameMaster/prompts.py (4 changes: 2 additions & 2 deletions)

@@ -41,6 +41,6 @@
 2. Archival Memory - Archive to store and retrieve general information and events about the player and the game-world. Can be used by calling the functions: 'archival_memory_search' and 'archival_memory_insert'.
-3. Conversation History - Since you are only seeing the latest player message, you have access to a conversation history. Stores the conversation history between you and the player. Can be searched by using: 'conversation_search' and 'conversation_search_date'.
+3. Conversation History - Since you are only seeing the latest conversation history, you can search the rest of the conversation history. Search it by using: 'conversation_search' and 'conversation_search_date'.
-Always remember tha!"""
+Always remember that the player can't see your memory or your interactions with it!"""
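The updated prompt tells the model which memory tools exist but not what a call looks like. Purely as an illustration, a game-master turn that queries archival memory might emit a payload shaped like the dictionary below; the "function" and "arguments" field names and the "query" parameter are hypothetical placeholders, not llama_cpp_agent's actual function-calling schema (only the tool name comes from the prompt above):

```python
# Hypothetical tool-call payload emitted by the game-master model.
tool_call = {
    "thoughts_and_reasoning": "The player asks about the merchant from the first session; search archival memory.",
    "function": "archival_memory_search",      # tool name taken from the prompt
    "arguments": {"query": "merchant first session"},  # argument name is an assumption
}
```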
src/llama_cpp_agent/llm_agent.py (81 changes: 20 additions & 61 deletions)

@@ -434,20 +434,21 @@ def get_response_role_and_completion(
 
         additional_suffix = ""
         if self.add_tools_and_structures_documentation_to_system_prompt:
+            after_system_instructions_list = []
+            for module in system_prompt_modules:
+                if module.position == SystemPromptModulePosition.after_system_instructions:
+                    after_system_instructions_list.append(module.get_formatted_content())
+            if len(after_system_instructions_list) > 0:
+                after_system_instructions = "\n\n".join(after_system_instructions_list)
+            else:
+                after_system_instructions = ""
             if structured_output_settings.output_type != LlmStructuredOutputType.no_structured_output:
                 # additional_suffix = "\n"
                 thoughts_and_reasoning = ""
 
                 if structured_output_settings.output_type == LlmStructuredOutputType.function_calling or structured_output_settings.output_type == LlmStructuredOutputType.parallel_function_calling:
                     if structured_output_settings.add_thoughts_and_reasoning_field and self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
+
                         thoughts_and_reasoning = function_calling_thoughts_and_reasoning_templater
                         thoughts_and_reasoning = thoughts_and_reasoning.generate_prompt({
                             "thoughts_and_reasoning_field_name": "001_" + structured_output_settings.thoughts_and_reasoning_field_name})

@@ -472,15 +473,7 @@ def get_response_role_and_completion(
                             {"function_list": function_list})})
                         messages[0]["content"] = system_prompt
                     elif not structured_output_settings.add_thoughts_and_reasoning_field and self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
-                        thoughts_and_reasoning = ""
+
                         function_field_name = "001_" + structured_output_settings.function_calling_name_field_name
                         arguments_field_name = "002_" + structured_output_settings.function_calling_content
                         heartbeat_beats = ""

@@ -502,14 +495,7 @@ def get_response_role_and_completion(
                             {"function_list": function_list})})
                         messages[0]["content"] = system_prompt
                     elif structured_output_settings.add_thoughts_and_reasoning_field and not self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
+
                         thoughts_and_reasoning = function_calling_thoughts_and_reasoning_templater
                         thoughts_and_reasoning = thoughts_and_reasoning.generate_prompt({
                             "thoughts_and_reasoning_field_name": structured_output_settings.thoughts_and_reasoning_field_name})

@@ -534,14 +520,7 @@ def get_response_role_and_completion(
                             {"function_list": function_list})})
                         messages[0]["content"] = system_prompt
                     elif not structured_output_settings.add_thoughts_and_reasoning_field and not self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
+
                         thoughts_and_reasoning = ""
                         function_field_name = structured_output_settings.function_calling_name_field_name
                         arguments_field_name = structured_output_settings.function_calling_content

@@ -565,14 +544,7 @@ def get_response_role_and_completion(
                         messages[0]["content"] = system_prompt
                 elif structured_output_settings.output_type == LlmStructuredOutputType.object_instance or structured_output_settings.output_type == LlmStructuredOutputType.list_of_objects:
                     if structured_output_settings.add_thoughts_and_reasoning_field and self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
+
                         thoughts_and_reasoning = structured_output_thoughts_and_reasoning_templater
                         thoughts_and_reasoning = thoughts_and_reasoning.generate_prompt({
                             "thoughts_and_reasoning_field_name": "001_" + structured_output_settings.thoughts_and_reasoning_field_name})

@@ -613,14 +585,7 @@ def get_response_role_and_completion(
                             "output_models": output_models})
                         messages[0]["content"] = system_prompt
                     elif structured_output_settings.add_thoughts_and_reasoning_field and not self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
+
                         thoughts_and_reasoning = structured_output_thoughts_and_reasoning_templater
                         thoughts_and_reasoning = thoughts_and_reasoning.generate_prompt({
                             "thoughts_and_reasoning_field_name": structured_output_settings.thoughts_and_reasoning_field_name})

@@ -638,15 +603,7 @@ def get_response_role_and_completion(
                             "output_models": output_models})
                         messages[0]["content"] = system_prompt
                     elif not structured_output_settings.add_thoughts_and_reasoning_field and not self.provider.is_using_json_schema_constraints():
-                        after_system_instructions_list = []
-                        for module in system_prompt_modules:
-                            if module.position == SystemPromptModulePosition.after_system_instructions:
-                                after_system_instructions_list.append(module.get_formatted_content())
-                        if len(after_system_instructions_list) > 0:
-                            after_system_instructions = "\n\n".join(after_system_instructions_list)
-                        else:
-                            after_system_instructions = ""
-                        thoughts_and_reasoning = ""
+
                         model_field_name = structured_output_settings.output_model_name_field_name
                         fields_field_name = structured_output_settings.output_model_attributes_field_name
 

@@ -661,16 +618,18 @@ def get_response_role_and_completion(
                             "output_models": output_models})
                         messages[0]["content"] = system_prompt
+
             if structured_output_settings.output_type == LlmStructuredOutputType.no_structured_output or structured_output_settings is None:
                 messages[0]["content"] += "\n" + after_system_instructions
             at_end_list = []
             for module in system_prompt_modules:
                 if module.position == SystemPromptModulePosition.at_end:
                     at_end_list.append(module.get_formatted_content())
             if len(at_end_list) > 0:
-                after_system_instructions = "\n\n".join(at_end_list)
+                at_end_list = "\n\n".join(at_end_list)
             else:
-                after_system_instructions = ""
+                at_end_list = ""
+
-            messages[0]["content"] += after_system_instructions
+            messages[0]["content"] += "\n" + at_end_list
         prompt, response_role = self.messages_formatter.format_conversation(
             messages, Roles.assistant
         )
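Taken together, the llm_agent.py change does two things: the after_system_instructions block, previously copy-pasted into every structured-output branch, is now computed once at the top of the add_tools_and_structures_documentation_to_system_prompt path, and the modules positioned at the end of the system prompt get their own at_end_list variable instead of overwriting after_system_instructions. Below is a minimal, self-contained sketch of that pattern; the SystemPromptModule and SystemPromptModulePosition classes here are simplified stand-ins for the llama_cpp_agent types, not the real implementations:

```python
from dataclasses import dataclass
from enum import Enum, auto


class SystemPromptModulePosition(Enum):
    # Simplified stand-in for llama_cpp_agent's position enum.
    after_system_instructions = auto()
    at_end = auto()


@dataclass
class SystemPromptModule:
    # Simplified stand-in; the real class also carries a section name and prefix text.
    position: SystemPromptModulePosition
    content: str

    def get_formatted_content(self) -> str:
        return self.content


def collect_prompt_sections(system_prompt_modules):
    """Gather both module sections once, mirroring the deduplicated flow after this commit."""
    # Computed a single time instead of once per structured-output branch.
    after_list = [m.get_formatted_content()
                  for m in system_prompt_modules
                  if m.position == SystemPromptModulePosition.after_system_instructions]
    after_system_instructions = "\n\n".join(after_list) if after_list else ""

    # Kept in a separate variable so it no longer clobbers after_system_instructions.
    at_end_list = [m.get_formatted_content()
                   for m in system_prompt_modules
                   if m.position == SystemPromptModulePosition.at_end]
    at_end_content = "\n\n".join(at_end_list) if at_end_list else ""

    return after_system_instructions, at_end_content


# Example usage with placeholder content:
modules = [
    SystemPromptModule(SystemPromptModulePosition.after_system_instructions, "Memory counts go here."),
    SystemPromptModule(SystemPromptModulePosition.at_end, "Closing instructions go here."),
]
after, at_end = collect_prompt_sections(modules)
```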
