Initial working ollama version (w/o Preferences dialog - configured in .md) #1539
dvorka committed Mar 3, 2024
1 parent 229ec26 commit 83328d0
Showing 10 changed files with 172 additions and 141 deletions.
3 changes: 2 additions & 1 deletion app/src/qt/main_window_presenter.cpp
@@ -2197,7 +2197,8 @@ void MainWindowPresenter::slotRunWingmanFromDialog(bool showDialog)
auto duration = std::chrono::duration_cast<std::chrono::seconds>(end - start);
// wingmanProgressDialog->hide();
string answerDescriptor{
"[model: " + commandWingmanChat.answerLlmModel +
"[provider: " + config.getWingmanLlmProviderAsString(config.getWingmanLlmProvider()) +
", model: " + commandWingmanChat.answerLlmModel +
", tokens (prompt/answer): " +
std::to_string(commandWingmanChat.promptTokens) + "/" + std::to_string(commandWingmanChat.answerTokens) +
", time: " +
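With this change, the status descriptor shown after a Wingman chat completes names the provider as well as the model. A minimal sketch of the string being built, using made-up values for the fields that MindForger takes from commandWingmanChat and the measured duration (the tail of the expression is cut off by the hunk, so the closing is assumed):

#include <iostream>
#include <string>

int main() {
    // Illustrative values only; the real ones come from commandWingmanChat
    // and the std::chrono duration measured above.
    std::string provider{"ollama"}, model{"llama2"};
    int promptTokens{128}, answerTokens{256}, seconds{12};

    std::string answerDescriptor{
        "[provider: " + provider +
        ", model: " + model +
        ", tokens (prompt/answer): " +
        std::to_string(promptTokens) + "/" + std::to_string(answerTokens) +
        ", time: " + std::to_string(seconds) + "s]" // closing assumed
    };
    std::cout << answerDescriptor << std::endl;
    // prints: [provider: ollama, model: llama2, tokens (prompt/answer): 128/256, time: 12s]
    return 0;
}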
6 changes: 5 additions & 1 deletion build/Makefile
@@ -35,7 +35,11 @@ MF_LANG := "en"
# Ubuntu distro: trusty xenial bionic focal jammy kinetic
DISTRO := "bionic"
# CPU cores that can be used to build the project
CPU_CORES := 7
ifeq ($(shell hostname), skunkworks)
CPU_CORES := 10
else
CPU_CORES := 7
endif
# Qt version to be used by MindForger
# MF_QT_VERSION := 5.9.9
MF_QT_VERSION := 5.15.2
90 changes: 64 additions & 26 deletions lib/src/config/configuration.cpp
@@ -38,7 +38,8 @@ const string Configuration::DEFAULT_UI_THEME_NAME = string{UI_DEFAULT_THEME};
const string Configuration::DEFAULT_UI_HTML_CSS_THEME = string{UI_DEFAULT_HTML_CSS_THEME};
const string Configuration::DEFAULT_EDITOR_FONT= string{UI_DEFAULT_EDITOR_FONT};
const string Configuration::DEFAULT_TIME_SCOPE = string{"0y0m0d0h0m"};
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = string{"gpt-3.5-turbo"};
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = string{"gpt-3.5-turbo"}; // "gpt-3.5-turbo" and "gpt-4" are symbolic names
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = string{"llama2"};

Configuration::Configuration()
: asyncMindThreshold{},
@@ -51,9 +52,10 @@ Configuration::Configuration()
autolinkingColonSplit{},
autolinkingCaseInsensitive{},
wingmanProvider{DEFAULT_WINGMAN_LLM_PROVIDER},
wingmanApiKey{},
wingmanOpenAiApiKey{},
wingmanLlmModel{DEFAULT_WINGMAN_LLM_MODEL_OPENAI},
wingmanOpenAiLlm{DEFAULT_WINGMAN_LLM_MODEL_OPENAI},
wingmanOllamaUrl{},
wingmanOllamaLlm{DEFAULT_WINGMAN_LLM_MODEL_OLLAMA},
md2HtmlOptions{},
distributorSleepInterval{DEFAULT_DISTRIBUTOR_SLEEP_INTERVAL},
markdownQuoteSections{},
@@ -150,9 +152,10 @@ void Configuration::clear()
autolinkingColonSplit = DEFAULT_AUTOLINKING_COLON_SPLIT;
autolinkingCaseInsensitive = DEFAULT_AUTOLINKING_CASE_INSENSITIVE;
wingmanProvider = DEFAULT_WINGMAN_LLM_PROVIDER;
wingmanApiKey.clear();
wingmanOpenAiApiKey.clear();
wingmanLlmModel.clear();
wingmanOpenAiLlm = DEFAULT_WINGMAN_LLM_MODEL_OPENAI;
wingmanOllamaUrl.clear();
wingmanOllamaLlm = DEFAULT_WINGMAN_LLM_MODEL_OLLAMA;
timeScopeAsString.assign(DEFAULT_TIME_SCOPE);
tagsScope.clear();
markdownQuoteSections = DEFAULT_MD_QUOTE_SECTIONS;
@@ -403,24 +406,27 @@ bool Configuration::canWingmanOpenAi()
return false;
}

bool Configuration::canWingmanOllama()
{
if(wingmanOllamaUrl.size() > 0) {
return true;
}

return false;
}

void Configuration::setWingmanLlmProvider(WingmanLlmProviders provider)
{
MF_DEBUG(
"Configuration::setWingmanLlmProvider(): "
<< std::to_string(provider) << endl);

wingmanProvider = provider;

// try to initialize Wingman with the given LLM provider;
// if it fails, the provider falls back to WINGMAN_PROVIDER_NONE ~ Wingman disabled
initWingman();
}

bool Configuration::initWingmanMock()
{
if(canWingmanMock()) {
wingmanApiKey.clear();
wingmanLlmModel.clear();
return true;
}

@@ -436,35 +442,57 @@
bool Configuration::initWingmanOpenAi() {
MF_DEBUG(" Configuration::initWingmanOpenAi()" << endl);
if(canWingmanOpenAi()) {
// API key
MF_DEBUG(
" Wingman OpenAI API key found in the shell environment variable "
"MINDFORGER_OPENAI_API_KEY or set in MF config" << endl);
if(wingmanOpenAiApiKey.size() > 0) {
wingmanApiKey = wingmanOpenAiApiKey;
} else {
if(wingmanOpenAiApiKey.size() <= 0) {
const char* apiKeyEnv = std::getenv(ENV_VAR_OPENAI_API_KEY);
MF_DEBUG(" Wingman API key loaded from the env: " << apiKeyEnv << endl);
wingmanApiKey = apiKeyEnv;
wingmanOpenAiApiKey = apiKeyEnv;
}

// LLM model
const char* llmModelEnv = std::getenv(ENV_VAR_OPENAI_LLM_MODEL);
if(llmModelEnv) {
MF_DEBUG(" Wingman LLM model loaded from the env: " << llmModelEnv << endl);
wingmanLlmModel = llmModelEnv;
wingmanOpenAiLlm = llmModelEnv;
} else {
MF_DEBUG(" Wingman LLM model set to default: " << DEFAULT_WINGMAN_LLM_MODEL_OPENAI << endl);
wingmanLlmModel = DEFAULT_WINGMAN_LLM_MODEL_OPENAI;
wingmanOpenAiLlm = DEFAULT_WINGMAN_LLM_MODEL_OPENAI;
}
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI;
return true;
}

MF_DEBUG(
" Wingman OpenAI API key NEITHER found in the environment variable "
"MINDFORGER_OPENAI_API_KEY, NOR set in MF configuration" << endl);
wingmanApiKey.clear();
wingmanLlmModel.clear();
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
if(wingmanProvider == WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI) {
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
}
return false;
}

/**
* @brief Initialize the ollama Wingman provider if its requirements are satisfied.
*/
bool Configuration::initWingmanOllama() {
MF_DEBUG(" Configuration::initWingmanOllama()" << endl);
if(canWingmanOllama()) {
// OPTIONAL: LLM model
if(wingmanOllamaLlm.size() <= 0) {
MF_DEBUG(" Wingman LLM model for ollama set to default: " << DEFAULT_WINGMAN_LLM_MODEL_OLLAMA << endl);
wingmanOllamaLlm = DEFAULT_WINGMAN_LLM_MODEL_OLLAMA;
}
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA;
return true;
}

MF_DEBUG(
" Wingman ollama URL not set in the configuration" << endl);
if(wingmanProvider == WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA) {
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
}
return false;
}
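The same fallback pattern in isolation, as a self-contained sketch with simplified names (this is not MindForger code): an empty ollama base URL disables the provider, while a missing model name merely falls back to the default.

#include <iostream>
#include <string>

enum WingmanLlmProviders { WINGMAN_PROVIDER_NONE, WINGMAN_PROVIDER_OLLAMA };

struct Config {
    WingmanLlmProviders provider{WINGMAN_PROVIDER_NONE};
    std::string ollamaUrl;
    std::string ollamaLlm;

    bool initWingmanOllama() {
        if(!ollamaUrl.empty()) {
            if(ollamaLlm.empty()) {
                ollamaLlm = "llama2"; // mirrors DEFAULT_WINGMAN_LLM_MODEL_OLLAMA
            }
            provider = WINGMAN_PROVIDER_OLLAMA;
            return true;
        }
        if(provider == WINGMAN_PROVIDER_OLLAMA) {
            provider = WINGMAN_PROVIDER_NONE; // URL missing ~ Wingman disabled
        }
        return false;
    }
};

int main() {
    Config c;
    c.ollamaUrl = "http://localhost:11434";
    std::cout << (c.initWingmanOllama() ? c.ollamaLlm : "disabled") << std::endl;
    return 0;
}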

@@ -474,7 +502,11 @@ bool Configuration::initWingman()
" BEFORE Configuration::initWingman():" << endl <<
" LLM provider: " << wingmanProvider << endl <<
" OpenAI API key env var name: " << ENV_VAR_OPENAI_API_KEY << endl <<
" Wingman provider API key : " << wingmanApiKey << endl
" OpenAI API key : " << wingmanOpenAiApiKey << endl <<
" OpenAI LLM env var name : " << ENV_VAR_OPENAI_LLM_MODEL << endl <<
" OpenAI LLM : " << wingmanOpenAiLlm << endl <<
" ollama URL : " << wingmanOllamaUrl << endl <<
" ollama LLM : " << wingmanOllamaLlm << endl
);

bool initialized = false;
@@ -493,6 +525,10 @@
MF_DEBUG(" OpenAI Wingman provider CONFIGURED" << endl);
initialized = initWingmanOpenAi();
break;
case WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA:
MF_DEBUG(" ollama Wingman provider CONFIGURED" << endl);
initialized = initWingmanOllama();
break;
default:
MF_DEBUG(
" ERROR: unable to CONFIGURE UNKNOWN Wingman provider: "
@@ -502,15 +538,17 @@

if(!initialized) {
wingmanProvider = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
wingmanApiKey.clear();
wingmanLlmModel.clear();
}

MF_DEBUG(
" BEFORE Configuration::initWingman():" << endl <<
" AFTER Configuration::initWingman():" << endl <<
" LLM provider: " << wingmanProvider << endl <<
" OpenAI API key env var name: " << ENV_VAR_OPENAI_API_KEY << endl <<
" Wingman provider API key : " << wingmanApiKey << endl
" OpenAI API key : " << wingmanOpenAiApiKey << endl <<
" OpenAI LLM env var name : " << ENV_VAR_OPENAI_LLM_MODEL << endl <<
" OpenAI LLM : " << wingmanOpenAiLlm << endl <<
" ollama URL : " << wingmanOllamaUrl << endl <<
" ollama LLM : " << wingmanOllamaLlm << endl
);

return initialized;
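Because setWingmanLlmProvider() re-runs initWingman() immediately, a caller only has to set the base URL before switching providers. A minimal usage sketch, assuming MindForger's m8r::Configuration singleton accessor (getInstance() is not shown in this diff):

// Sketch only: point Wingman at a local ollama server, then switch the
// provider; with an empty URL, initWingman() falls back to
// WINGMAN_PROVIDER_NONE and Wingman stays disabled.
m8r::Configuration& config = m8r::Configuration::getInstance();
config.setWingmanOllamaUrl("http://localhost:11434");
config.setWingmanLlmProvider(m8r::WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA);
bool ready = config.canWingmanOllama(); // true once the URL is set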
31 changes: 18 additions & 13 deletions lib/src/config/configuration.h
@@ -50,7 +50,6 @@ enum WingmanLlmProviders {
WINGMAN_PROVIDER_MOCK,
WINGMAN_PROVIDER_OPENAI,
WINGMAN_PROVIDER_OLLAMA
// TODO WINGMAN_PROVIDER_GOOGLE,
};

// const in constexpr makes value const
@@ -260,12 +259,14 @@ class Configuration {

static const std::string DEFAULT_ACTIVE_REPOSITORY_PATH;
static const std::string DEFAULT_TIME_SCOPE;

static const std::string DEFAULT_WINGMAN_LLM_MODEL_OPENAI;
static const std::string DEFAULT_WINGMAN_LLM_MODEL_OLLAMA;
static constexpr const WingmanLlmProviders DEFAULT_WINGMAN_LLM_PROVIDER = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;

static constexpr const bool DEFAULT_AUTOLINKING = false;
static constexpr const bool DEFAULT_AUTOLINKING_COLON_SPLIT = true;
static constexpr const bool DEFAULT_AUTOLINKING_CASE_INSENSITIVE = true;
static constexpr const WingmanLlmProviders DEFAULT_WINGMAN_LLM_PROVIDER = WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
static constexpr const bool DEFAULT_SAVE_READS_METADATA = true;

static constexpr const bool UI_DEFAULT_NERD_TARGET_AUDIENCE = true;
@@ -374,10 +375,11 @@ class Configuration {
- on change: re-init Wingman DIALOG (refresh pre-defined prompts)
*/
WingmanLlmProviders wingmanProvider; // "none", "Mock", "OpenAI", ...
std::string wingmanApiKey; // API key of the currently configured Wingman LLM provider
std::string wingmanOpenAiApiKey; // OpenAI API specified by user in the config, env or UI
std::string wingmanLlmModel; // preferred LLM model of the currently configured provider, like "gpt-3.5-turbo"

std::string wingmanOpenAiLlm;
std::string wingmanOllamaUrl; // base URL like http://localhost:11434
std::string wingmanOllamaLlm;

TimeScope timeScope;
std::string timeScopeAsString;
std::vector<std::string> tagsScope;
@@ -553,6 +555,8 @@ class Configuration {
return "mock";
} else if(provider == WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI) {
return "openai";
} else if(provider == WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA) {
return "ollama";
}

return "none";
@@ -563,24 +567,25 @@
bool canWingmanMock() { return false; }
#endif
bool canWingmanOpenAi();
bool canWingmanOllama();
private:
bool initWingmanMock();
bool initWingmanOpenAi();
bool initWingmanOllama();
/**
* @brief Initialize Wingman's LLM provider.
*/
bool initWingman();
public:
std::string getWingmanOpenAiApiKey() const { return wingmanOpenAiApiKey; }
void setWingmanOpenAiApiKey(std::string apiKey) { wingmanOpenAiApiKey = apiKey; }
/**
* @brief Get API key of the currently configured Wingman LLM provider.
*/
std::string getWingmanApiKey() const { return wingmanApiKey; }
/**
* @brief Get preferred Wingman LLM provider model name.
*/
std::string getWingmanLlmModel() const { return wingmanLlmModel; }
std::string getWingmanOpenAiLlm() const { return wingmanOpenAiLlm; }
void setWingmanOpenAiLlm(std::string llm) { wingmanOpenAiLlm = llm; }
std::string getWingmanOllamaUrl() const { return wingmanOllamaUrl; }
void setWingmanOllamaUrl(std::string url) { wingmanOllamaUrl = url; }
std::string getWingmanOllamaLlm() const { return wingmanOllamaLlm; }
void setWingmanOllamaLlm(std::string llm) { wingmanOllamaLlm = llm; }

/**
* @brief Check whether a Wingman LLM provider is ready from
* the configuration perspective.
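getWingmanLlmProviderAsString() above persists the provider as "mock", "openai", "ollama", or "none". The inverse parse, needed when the value is read back from the configuration .md, is not part of the shown hunks; a hypothetical helper could look like this:

#include <string>

// Hypothetical helper, not part of this commit: map the persisted string
// back to the enum; anything unrecognized disables Wingman.
static WingmanLlmProviders wingmanLlmProviderFromString(const std::string& s) {
    if(s == "mock") return WingmanLlmProviders::WINGMAN_PROVIDER_MOCK;
    if(s == "openai") return WingmanLlmProviders::WINGMAN_PROVIDER_OPENAI;
    if(s == "ollama") return WingmanLlmProviders::WINGMAN_PROVIDER_OLLAMA;
    return WingmanLlmProviders::WINGMAN_PROVIDER_NONE;
}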
