diff --git a/addon/globalPlugins/openai/__init__.py b/addon/globalPlugins/openai/__init__.py
index ed3aab1..8b3271d 100644
--- a/addon/globalPlugins/openai/__init__.py
+++ b/addon/globalPlugins/openai/__init__.py
@@ -453,6 +453,18 @@ def __init__(self):
 				len(apikeymanager._managers or [])
 			)
 		)
+		wx.CallAfter(self.futureMessage)
+
+	def futureMessage(self):
+		if not conf["futureMessage"]:
+			msg = _("The future of this add-on is BasiliskLLM (a standalone application and a minimal NVDA add-on). We highly recommend you consider using BasiliskLLM instead of this one. Would you like to visit the BasiliskLLM website?")
+			if gui.messageBox(
+				msg,
+				"Open AI",
+				wx.YES_NO | wx.ICON_INFORMATION
+			) == wx.YES:
+				self.onBasiliskLLM(None)
+			conf["futureMessage"] = True
 
 	def createMenu(self):
 		self.submenu = wx.Menu()
@@ -503,6 +515,13 @@ def createMenu(self):
 			item
 		)
 
+		item = self.submenu.Append(
+			wx.ID_ANY,
+			_("BasiliskLLM"),
+			_("Open the BasiliskLLM website")
+		)
+		gui.mainFrame.sysTrayIcon.Bind(wx.EVT_MENU, self.onBasiliskLLM, item)
+
 		addon_name = ADDON_INFO["name"]
 		addon_version = ADDON_INFO["version"]
 		self.submenu_item = gui.mainFrame.sysTrayIcon.menu.InsertMenu(
@@ -545,6 +564,10 @@ def onCheckForUpdates(self, evt):
 			)
 		updatecheck.update_last_check()
 
+	def onBasiliskLLM(self, evt):
+		url = "https://github.com/aaclause/basiliskLLM/"
+		os.startfile(url)
+
 	def terminate(self):
 		gui.settingsDialogs.NVDASettingsDialog.categoryClasses.remove(SettingsDlg)
 		gui.mainFrame.sysTrayIcon.menu.DestroyItem(self.submenu_item)
diff --git a/addon/globalPlugins/openai/configspec.py b/addon/globalPlugins/openai/configspec.py
index 62efab2..8ead4df 100644
--- a/addon/globalPlugins/openai/configspec.py
+++ b/addon/globalPlugins/openai/configspec.py
@@ -57,6 +57,7 @@
 		"speechResponseReceived": "boolean(default=True)",
 	},
 	"renewClient": "boolean(default=False)",
-	"debug": "boolean(default=False)"
+	"debug": "boolean(default=False)",
+	"futureMessage": "boolean(default=False)",
 }
 config.conf.spec["OpenAI"] = confSpecs
diff --git a/readme.md b/readme.md
index c77e20c..fe559a4 100644
--- a/readme.md
+++ b/readme.md
@@ -1,3 +1,5 @@
+NOTE: The future of this add-on is [BasiliskLLM](https://github.com/aaclause/basiliskLLM/) (a standalone application and a minimal NVDA add-on). We highly recommend you consider using BasiliskLLM instead of this one.
+
 # Open AI NVDA add-on
 
 This add-on designed to seamlessly integrate the capabilities of the Open AI API into your workflow. Whether you're looking to craft comprehensive text, translate passages with precision, concisely summarize documents, or even interpret and describe visual content, this add-on does it all with ease.
@@ -27,8 +29,8 @@ You are now equipped to explore the features of the OpenAI NVDA add-on!
 
 ### The Main Dialog
 
-The majority of the add-on's features can be easily accessed via a dialog box, which can be launched by pressing `NVDA+G`. 
-As an alternative, navigate to the "Open AI" submenu under the NVDA menu and select the "Main Dialog…" item. 
+The majority of the add-on's features can be easily accessed via a dialog box, which can be launched by pressing `NVDA+G`.
+As an alternative, navigate to the "Open AI" submenu under the NVDA menu and select the "Main Dialog…" item.
 Within this dialog, you will be able to:
 
 - Initiate interactive conversations with the AI models for assistance or information gathering.
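
For reference, below is a minimal, standalone sketch of the one-time notice pattern this patch introduces: a persisted `futureMessage` flag guards a prompt that is shown only once, and accepting it opens the BasiliskLLM page. The file path and helper names here are hypothetical, and the sketch swaps NVDA-specific pieces for standard-library equivalents so it runs on its own (a JSON file instead of NVDA's `conf` store, `input()` instead of `gui.messageBox`, and `webbrowser.open` instead of the add-on's `os.startfile(url)`).

```python
# Minimal sketch (not the add-on's actual code) of the one-time "future message" pattern.
import json
import webbrowser
from pathlib import Path

BASILISK_URL = "https://github.com/aaclause/basiliskLLM/"
# Hypothetical location for the persisted flag; the add-on stores it in NVDA's config instead.
NOTICE_FLAG_FILE = Path.home() / ".openai_addon_future_message.json"

def _flag_already_set() -> bool:
	"""Return True if the notice was already shown in a previous run."""
	try:
		return bool(json.loads(NOTICE_FLAG_FILE.read_text()).get("futureMessage", False))
	except (OSError, ValueError):
		return False

def _set_flag() -> None:
	"""Persist the flag so the notice is never shown again."""
	NOTICE_FLAG_FILE.write_text(json.dumps({"futureMessage": True}))

def show_future_message() -> None:
	"""Show the BasiliskLLM notice once; open the project page if the user accepts."""
	if _flag_already_set():
		return
	answer = input(
		"The future of this add-on is BasiliskLLM. "
		"Open the BasiliskLLM website? [y/N] "
	).strip().lower()
	if answer == "y":
		webbrowser.open(BASILISK_URL)  # the add-on itself calls os.startfile(url)
	_set_flag()

if __name__ == "__main__":
	show_future_message()
```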