Skip to content

Commit

Permalink
Initial Wingman@OpenAI version: JSON library creation and parsing; un…
Browse files Browse the repository at this point in the history
…it tests for JSON; configuration for Wingman #1514
  • Loading branch information
dvorka committed Jan 14, 2024
1 parent b62417b commit f92f5e4
Show file tree
Hide file tree
Showing 17 changed files with 606 additions and 119 deletions.
23 changes: 17 additions & 6 deletions app/src/qt/dialogs/chat_dialog.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ using namespace std;

const string COLOR_PROMPT_GREEN{"#00bb00"};
const string COLOR_PROMPT_BLUE{"#00aaaa"};
const string COLOR_PROMPT_GRAY{"#777777"};

ChatDialog::ChatDialog(QWidget* parent)
: QDialog(parent)
Expand Down Expand Up @@ -105,8 +106,6 @@ string ChatDialog::getTerminalPrompt(bool error)
"<hr/>"
"<font color='" + COLOR_PROMPT_BLUE + "'>@" + thing + "</font> " +
"<font color='" + COLOR_PROMPT_GREEN + "'><b>" + thingName + "</b></font>"
"&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;"
"<font color='" + COLOR_PROMPT_BLUE + "'>[" + wingmanModel + "]</font>"
"<br/>"
};

Expand All @@ -133,10 +132,17 @@ void ChatDialog::insertPrompt(const std::string& prompt)
chatWindow->ensureCursorVisible();
}

void ChatDialog::insertOutput(const std::string& output, bool error)
void ChatDialog::insertOutput(
const string& output, const string& outputDescriptor, bool error)
{
chatWindow->insertHtml(
QString::fromStdString(
"<br/>"
"<b>Answer:</b>"
" <font color='" + COLOR_PROMPT_GRAY + "'>" +
outputDescriptor +
"</font>" +
"<br/>" +
"<br/>" +
output +
"<br/>" +
Expand Down Expand Up @@ -179,17 +185,22 @@ void ChatDialog::runCommand()

// run prompt
MF_DEBUG("Running prompt: '" << cmd << "'" << endl);
// TODO status bar
string answerHtml{"Foo result Lorem ipsum dolor sit amet, consectetur adipiscing elit."};
string answerDescriptor{"[foo model]"};
int statusCode{0};
string cmdStdOut{"Foo result Lorem ipsum dolor sit amet, consectetur adipiscing elit."};

// TODO run prompt
// TODO run prompt
// TODO run prompt

MF_DEBUG("Chat command finished with status: " << statusCode << endl);
if(cmdStdOut.size()) {
if(answerHtml.size()) {
// replaceAll("\n", "<br/>", cmdStdOut);
this->insertOutput(cmdStdOut, statusCode!=0?true:false);
this->insertOutput(
answerHtml,
answerDescriptor,
statusCode==0?false:true);
}
}
}
Expand Down
5 changes: 4 additions & 1 deletion app/src/qt/dialogs/chat_dialog.h
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,10 @@ class ChatDialog : public QDialog
~ChatDialog();

void insertPrompt(const std::string& prompt);
void insertOutput(const std::string& output, bool error=false);
void insertOutput(
const std::string& output,
const std::string& outputDescriptor,
bool error=false);

void show();

Expand Down
4 changes: 2 additions & 2 deletions app/src/qt/dialogs/wingman_dialog.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -71,10 +71,10 @@ WingmanDialog::WingmanDialog(
promptEdit->setToolTip(
tr("Type in your prompt like: 'Translate the following text to Spanish: #CONTENT."));

promptsLayout->addWidget(predefinedPromptsLabel);
promptsLayout->addWidget(predefinedPromptsCombo);
promptsLayout->addWidget(promptLabel);
promptsLayout->addWidget(promptEdit);
promptsLayout->addWidget(predefinedPromptsLabel);
promptsLayout->addWidget(predefinedPromptsCombo);
promptsGroup->setLayout(promptsLayout);

// GROUP: content
Expand Down
97 changes: 83 additions & 14 deletions app/src/qt/main_window_presenter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ MainWindowPresenter::MainWindowPresenter(MainWindowView& view)
mind->getWingman()->getPredefinedNPrompts(),
mind->getWingman()->getPredefinedTPrompts(),
&view};
wingmanProgressDialog = nullptr;
chatDialog = new ChatDialog{&view};
scopeDialog = new ScopeDialog{mind->getOntology(), &view};
newOrganizerDialog = new OrganizerNewDialog{mind->getOntology(), &view};
Expand Down Expand Up @@ -283,6 +284,9 @@ MainWindowPresenter::~MainWindowPresenter()
//if(findNoteByNameDialog) delete findNoteByNameDialog;
if(insertImageDialog) delete insertImageDialog;
if(newLibraryDialog) delete newLibraryDialog;
if(wingmanDialog) delete wingmanDialog;
if(wingmanProgressDialog) delete wingmanProgressDialog;
if(chatDialog) delete chatDialog;

// TODO deletes
delete this->mdConfigRepresentation;
Expand Down Expand Up @@ -2040,7 +2044,19 @@ void MainWindowPresenter::handleLeftToolbarAction(string selectedTool)

void MainWindowPresenter::doActionWingman()
{
MF_DEBUG("SIGNAL handled: WINGMAN dialog...");
MF_DEBUG("SIGNAL handled: WINGMAN dialog..." << endl);
if(!config.isWingman()) {
QMessageBox msgBox{
QMessageBox::Critical,
QObject::tr("Wingman Not Available"),
QObject::tr(
"Wingman provider is either not configured or "
"it cannot be initialized.")
};
msgBox.exec();
return;
}

// get PHRASE from the active context:
// - N editor: get word under cursor OR selected text
// - N tree: get N name
Expand Down Expand Up @@ -2139,28 +2155,81 @@ void MainWindowPresenter::handleActionWingman()
MF_DEBUG("SIGNAL handled: WINGMAN dialog..." << endl);
this->wingmanDialog->hide();

string wingmanAnswer{};

// system prompt: prompt + context
// show progress bar
/*
if(wingmanProgressDialog == nullptr) {
wingmanProgressDialog = new QProgressDialog(
tr("Wingman is talking to GPT provider..."),
tr("Cancel"),
0,
100,
&view);
} else {
wingmanProgressDialog->reset();
}
wingmanProgressDialog->setWindowModality(Qt::WindowModal);
wingmanProgressDialog->show();
wingmanProgressDialog->setValue(5);
*/
statusBar->showInfo(QString(tr("Wingman is talking to GPT provider...")));

// resolve prompt to system prompt
string systemPrompt{this->wingmanDialog->getPromptText().toStdString()};
// prompt: resolve prompt w/ the context(s)
string prompt{this->wingmanDialog->getPromptText().toStdString()};
replaceAll(
CTX_INCLUDE_NAME,
this->wingmanDialog->getContextNameText().toStdString(),
systemPrompt);
prompt);
replaceAll(
CTX_INCLUDE_TEXT,
this->wingmanDialog->getContextText().toStdString(),
systemPrompt);
prompt);
// TODO wingmanProgressDialog->setValue(10);

// RUN Wingman
string httpResponse{};
WingmanStatusCode status{
WingmanStatusCode::WINGMAN_STATUS_CODE_OK
};
string errorMessage{};
string answerLlmModel{};
int promptTokens{};
int answerTokens{};
string answerHtml{};
// chat
mind->wingmanChat(
prompt,
config.getWingmanLlmModel(),
httpResponse,
status,
errorMessage,
answerLlmModel,
promptTokens,
answerTokens,
answerHtml
);
string answerDescriptor{
"[model: " + answerLlmModel +
", tokens (prompt/answer): " +
std::to_string(promptTokens) + "/" + std::to_string(answerTokens) +
", status: " +
(status==WingmanStatusCode::WINGMAN_STATUS_CODE_OK?"OK":"ERROR") +
"]"
};

// RUN wingman
// TODO route action to wingman handler
mind->wingmanSummarize(systemPrompt, wingmanAnswer);
// HIDE progress dialog
// TODO wingmanProgressDialog->setValue(100);
// TODO wingmanProgressDialog->hide();

// SHOW result
// TODO from huge prompts + suffix ...
this->chatDialog->insertPrompt(prompt);
this->chatDialog->insertOutput(
answerHtml,
answerDescriptor,
status==WingmanStatusCode::WINGMAN_STATUS_CODE_OK?false:true
);

// show result
this->chatDialog->insertPrompt(systemPrompt); // TODO trom huge prompts + suffix ...
this->chatDialog->insertOutput(wingmanAnswer);
statusBar->showInfo(QString(tr("Wingman got answer from the GPT provider")));
this->chatDialog->show();
}

Expand Down
1 change: 1 addition & 0 deletions app/src/qt/main_window_presenter.h
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ class MainWindowPresenter : public QObject
RemoveLibraryDialog* rmLibraryDialog;
RunToolDialog* runToolDialog;
WingmanDialog* wingmanDialog;
QProgressDialog* wingmanProgressDialog;
ChatDialog* chatDialog;
ScopeDialog* scopeDialog;
OrganizerNewDialog* newOrganizerDialog;
Expand Down
4 changes: 3 additions & 1 deletion build/make/test-lib-units.sh
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ export OPTION_RECOMPILE=yes # recompile before running test(s) (comment this lin
#export OPTION_TEST="MarkdownParserTestCase.Bug622Loop64kLinesOverflow"
#export OPTION_TEST="MarkdownParserTestCase.Deadline"
#export OPTION_TEST="MarkdownParserTestCase.Links"
export OPTION_TEST="MarkdownParserTestCase.LinksWithParenthesis"
#export OPTION_TEST="MarkdownParserTestCase.LinksWithParenthesis"
#export OPTION_TEST="MarkdownParserTestCase.MarkdownLexerLinks"
#export OPTION_TEST="MarkdownParserTestCase.MarkdownLexerSections"
#export OPTION_TEST="MarkdownParserTestCase.MarkdownLexerSectionsNoMetadata"
Expand Down Expand Up @@ -120,6 +120,8 @@ export OPTION_TEST="MarkdownParserTestCase.LinksWithParenthesis"
#export OPTION_TEST="StringGearTestCase.StringToNcName"
#export OPTION_TEST="TrieTestCase.*"
#export OPTION_TEST="TrieTestCase.AddAndRemove"
#export OPTION_TEST="JSonTestCase.SerializeOpenAiRequest"
export OPTION_TEST="JSonTestCase.ParseOpenAiResponse"

# environment - to be specified in .bashrc or elsewhere:
# export M8R_CPU_CORES=7
Expand Down
35 changes: 35 additions & 0 deletions lib/src/config/configuration.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,11 @@ Configuration::Configuration()
autolinking{DEFAULT_AUTOLINKING},
autolinkingColonSplit{},
autolinkingCaseInsensitive{},
wingman{true},
wingmanProvider{},
wingmanShellEnvApiKey{true},
wingmanApiKey{},
wingmanLlmModel{"gpt-3.5-turbo"},
md2HtmlOptions{},
distributorSleepInterval{DEFAULT_DISTRIBUTOR_SLEEP_INTERVAL},
markdownQuoteSections{},
Expand Down Expand Up @@ -378,4 +383,34 @@ const char* Configuration::getEditorFromEnv()
return editor;
}

bool Configuration::isWingman() {
MF_DEBUG("Configuration::isWingman(" << wingman << "):" << endl);
if(wingman) {
MF_DEBUG(
" Wingman key @ env: " << wingmanShellEnvApiKey << endl <<
" Wingman key name : " << ENV_VAR_OPENAI_API_KEY << endl <<
" Wingman key : " << wingmanApiKey << endl
);
if(wingmanShellEnvApiKey && wingmanApiKey.empty()) {
// OpenAI wingman provider initialization
// user may have multiple OpenAI accounts and keys - get the key generated for MF
const char* apiKeyEnv
= std::getenv(ENV_VAR_OPENAI_API_KEY);
MF_DEBUG(" Wingman key loaded from env: " << apiKeyEnv << endl);
if(apiKeyEnv) {
wingmanApiKey = apiKeyEnv;
return true;
} else {
std::cerr << "OpenAI API key not found in the environment variable MINDFORGER_OPENAI_API_KEY." << std::endl;
wingman = false;
return false;
}
} else {
return true;
}
}
return false;
}


} // m8r namespace
11 changes: 11 additions & 0 deletions lib/src/config/configuration.h
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,9 @@ constexpr const auto TOOL_GOOGLE_SEARCH = "Google Search";
constexpr const auto TOOL_CHAT_GPT_WEB = "OpenAI chatGPT web";
constexpr const auto TOOL_WIKIPEDIA = "Wikipedia";

constexpr const auto ENV_VAR_OPENAI_API_KEY = "MINDFORGER_OPENAI_API_KEY";


// improve platform/language specific
constexpr const auto DEFAULT_NEW_OUTLINE = "# New Markdown File\n\nThis is a new Markdown file created by MindForger.\n\n#Section 1\nThe first section.\n\n";

Expand Down Expand Up @@ -265,6 +268,11 @@ class Configuration {
bool autolinking; // enable MD autolinking
bool autolinkingColonSplit;
bool autolinkingCaseInsensitive;
bool wingman; // is Wingman enabled
std::string wingmanProvider; // "OpenAI", "Google", "Mock"
bool wingmanShellEnvApiKey; // use API key from shell environment on MF load
std::string wingmanApiKey;
std::string wingmanLlmModel;
TimeScope timeScope;
std::string timeScopeAsString;
std::vector<std::string> tagsScope;
Expand Down Expand Up @@ -418,6 +426,9 @@ class Configuration {
void setAutolinkingColonSplit(bool autolinkingColonSplit) { this->autolinkingColonSplit=autolinkingColonSplit; }
bool isAutolinkingCaseInsensitive() const { return autolinkingCaseInsensitive; }
void setAutolinkingCaseInsensitive(bool autolinkingCaseInsensitive) { this->autolinkingCaseInsensitive=autolinkingCaseInsensitive; }
bool isWingman();
std::string getWingmanApiKey() const { return wingmanApiKey; }
std::string getWingmanLlmModel() const { return wingmanLlmModel; }
unsigned int getMd2HtmlOptions() const { return md2HtmlOptions; }
AssociationAssessmentAlgorithm getAaAlgorithm() const { return aaAlgorithm; }
void setAaAlgorithm(AssociationAssessmentAlgorithm aaa) { aaAlgorithm = aaa; }
Expand Down
30 changes: 22 additions & 8 deletions lib/src/mind/ai/llm/mock_wingman.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,28 @@ MockWingman::~MockWingman()
{
}


void MockWingman::chat(const std::string& prompt, std::string& answer)
{
MF_DEBUG("MockWingman::summarize() text:" << prompt << endl);

answer.assign("chat(MOCK, '"+prompt+"')");

MF_DEBUG("MockWingman::summarize() summary:" << answer << endl);
void MockWingman::chat(
const string& prompt,
const string& llmModel,
string& httpResponse,
WingmanStatusCode& status,
string& errorMessage,
string& answerLlmModel,
int& promptTokens,
int& answerTokens,
string& answerHtml
) {
MF_DEBUG("MockWingman::chat() prompt:" << prompt << endl);

httpResponse.clear();
status=WingmanStatusCode::WINGMAN_STATUS_CODE_OK;
errorMessage.clear();
answerLlmModel.assign(llmModel);
promptTokens=42;
answerTokens=42198;
answerHtml.assign("chat(MOCK, '"+prompt+"')");

MF_DEBUG("MockWingman::chat() answer:" << answerHtml << endl);
}

} // m8r namespace
12 changes: 11 additions & 1 deletion lib/src/mind/ai/llm/mock_wingman.h
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,17 @@ class MockWingman: Wingman
MockWingman& operator =(const MockWingman&&) = delete;
~MockWingman();

virtual void chat(const std::string& prompt, std::string& answer) override;
virtual void chat(
const std::string& prompt,
const std::string& llmModel,
std::string& httpResponse,
WingmanStatusCode& status,
std::string& errorMessage,
std::string& answerLlmModel,
int& promptTokens,
int& answerTokens,
std::string& answerHtml
) override;
};

}
Expand Down
Loading

0 comments on commit f92f5e4

Please sign in to comment.