Skip to content

Commit

Permalink
Merge pull request #31 from gnosis/gabriel/ollama-agent
Browse files Browse the repository at this point in the history
Created Ollama LangChain agent
  • Loading branch information
gabrielfior authored Mar 12, 2024
2 parents 79e4887 + 8f6f904 commit a337485
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 2 deletions.
7 changes: 5 additions & 2 deletions prediction_market_agent/agents/langchain_agent.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,18 @@
from typing import Optional

from langchain.agents import AgentType, initialize_agent, load_tools
from langchain_community.llms import OpenAI
from langchain_core.language_models import BaseLLM
from prediction_market_agent_tooling.markets.agent_market import AgentMarket

from prediction_market_agent import utils
from prediction_market_agent.agents.abstract import AbstractAgent


class LangChainAgent(AbstractAgent):
def __init__(self) -> None:
def __init__(self, llm: Optional[BaseLLM] = None) -> None:
keys = utils.APIKeys()
llm = OpenAI(openai_api_key=keys.openai_api_key)
llm = OpenAI(openai_api_key=keys.openai_api_key) if not llm else llm
# Can use pre-defined search tool
# TODO: Tavily tool could give better results
# https://docs.tavily.com/docs/tavily-api/langchain
Expand Down
17 changes: 17 additions & 0 deletions prediction_market_agent/agents/ollama_langchain_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from langchain_community.llms.ollama import Ollama

from prediction_market_agent.agents.langchain_agent import LangChainAgent
from prediction_market_agent.tools.ollama_utils import is_ollama_running


class OllamaLangChainAgent(LangChainAgent):
    """LangChain agent backed by a locally running Ollama model."""

    def __init__(
        self,
        model: str = "mistral",
        base_url: str = "http://localhost:11434",
    ) -> None:
        """Initialize the agent against a local Ollama server.

        Args:
            model: Ollama model name to load. Defaults to "mistral"
                since it supports function calling.
            base_url: Base URL of the Ollama server. Used both for the
                health check and for the LLM client so they cannot
                silently point at different servers.

        Raises:
            EnvironmentError: If no Ollama server responds at `base_url`.
        """
        # Make sure Ollama is running locally before building the client.
        if not is_ollama_running(base_url):
            raise EnvironmentError(
                "Ollama is not running, cannot instantiate Ollama agent"
            )
        llm = Ollama(model=model, base_url=base_url)
        super().__init__(llm=llm)
6 changes: 6 additions & 0 deletions prediction_market_agent/tools/ollama_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
import requests


def is_ollama_running(base_url: str = "http://localhost:11434") -> bool:
    """Return True iff an Ollama server responds at `base_url`.

    Probes the lightweight `/api/tags` endpoint as a health check.

    Args:
        base_url: Base URL of the Ollama server to probe.

    Returns:
        True if the server answered with HTTP 200, False otherwise —
        including when the server is down, unreachable, or too slow.
    """
    try:
        # Short timeout so a hung/unreachable server cannot block forever.
        r = requests.get(f"{base_url}/api/tags", timeout=5)
    except requests.RequestException:
        # Connection refused, DNS failure, timeout, etc. all simply mean
        # "not running" for the purposes of this check — don't propagate.
        return False
    return r.status_code == 200

0 comments on commit a337485

Please sign in to comment.