diff --git a/lua/gp/config.lua b/lua/gp/config.lua
index ff819ad..0966f2a 100644
--- a/lua/gp/config.lua
+++ b/lua/gp/config.lua
@@ -17,12 +17,9 @@ local config = {
 	-- openai_api_key: "sk-...",
 	-- openai_api_key = os.getenv("env_name.."),
 	openai_api_key = os.getenv("OPENAI_API_KEY"),
-	-- api endpoint (you can change this to azure endpoint)
-	openai_api_endpoint = "https://api.openai.com/v1/chat/completions",
-	-- openai_api_endpoint = "https://$URL.openai.azure.com/openai/deployments/{{model}}/chat/completions",

 	-- at least one working provider is required
-	-- provider needs to have endpoint
+	-- to disable a provider set it to empty table like openai = {}
 	providers = {
 		-- secrets can be strings or tables with command and arguments
 		-- secret = { "cat", "path_to/openai_api_key" },
@@ -31,7 +28,7 @@ local config = {
 		-- secret = os.getenv("env_name.."),
 		openai = {
 			endpoint = "https://api.openai.com/v1/chat/completions",
-			secret = os.getenv("OPENAI_API_KEY"),
+			-- secret = os.getenv("OPENAI_API_KEY"),
 		},
 		azure = {
 			-- endpoint = "https://$URL.openai.azure.com/openai/deployments/{{model}}/chat/completions",
@@ -66,12 +63,11 @@ local config = {
 	-- agents = { { name = "ChatGPT4" }, ... },
 	agents = {
 		{
-			provider = "openai",
 			name = "ChatGPT4",
 			chat = true,
 			command = false,
 			-- string with model name or table with model name and parameters
-			model = { model = "gpt-4-1106-preview", temperature = 1.1, top_p = 1 },
+			model = { model = "gpt-4-turbo-preview", temperature = 1.1, top_p = 1 },
 			-- system prompt (use this to specify the persona/role of the AI)
 			system_prompt = "You are a general AI assistant.\n\n"
 				.. "The user provided the additional info about how they would like you to respond:\n\n"
@@ -89,7 +85,7 @@ local config = {
 			chat = true,
 			command = false,
 			-- string with model name or table with model name and parameters
-			model = { model = "gpt-3.5-turbo-1106", temperature = 1.1, top_p = 1 },
+			model = { model = "gpt-3.5-turbo", temperature = 1.1, top_p = 1 },
 			-- system prompt (use this to specify the persona/role of the AI)
 			system_prompt = "You are a general AI assistant.\n\n"
 				.. "The user provided the additional info about how they would like you to respond:\n\n"
@@ -125,7 +121,7 @@ local config = {
 			chat = false,
 			command = true,
 			-- string with model name or table with model name and parameters
-			model = { model = "gpt-4-1106-preview", temperature = 0.8, top_p = 1 },
+			model = { model = "gpt-4-turbo-preview", temperature = 0.8, top_p = 1 },
 			-- system prompt (use this to specify the persona/role of the AI)
 			system_prompt = "You are an AI working as a code editor.\n\n"
 				.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
@@ -137,7 +133,7 @@ local config = {
 			chat = false,
 			command = true,
 			-- string with model name or table with model name and parameters
-			model = { model = "gpt-3.5-turbo-1106", temperature = 0.8, top_p = 1 },
+			model = { model = "gpt-3.5-turbo", temperature = 0.8, top_p = 1 },
 			-- system prompt (use this to specify the persona/role of the AI)
 			system_prompt = "You are an AI working as a code editor.\n\n"
 				.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
@@ -365,6 +361,12 @@ local config = {
 			local copy = vim.deepcopy(plugin)
 			local key = copy.config.openai_api_key
 			copy.config.openai_api_key = key:sub(1, 3) .. string.rep("*", #key - 6) .. key:sub(-3)
+			for provider, _ in pairs(copy.providers) do
+				local s = copy.providers[provider].secret
+				if s and type(s) == "string" then
+					copy.providers[provider].secret = s:sub(1, 3) .. string.rep("*", #s - 6) .. s:sub(-3)
+				end
+			end
 			local plugin_info = string.format("Plugin structure:\n%s", vim.inspect(copy))
 			local params_info = string.format("Command params:\n%s", vim.inspect(params))
 			local lines = vim.split(plugin_info .. "\n" .. params_info, "\n")
diff --git a/lua/gp/health.lua b/lua/gp/health.lua
index 201143b..4b8d163 100644
--- a/lua/gp/health.lua
+++ b/lua/gp/health.lua
@@ -15,6 +15,7 @@ function M.check()
 		vim.health.error("require('gp').setup() has not been called")
 	end

+	--TODO: obsolete
 	---@diagnostic disable-next-line: undefined-field
 	local api_key = gp.config.openai_api_key

diff --git a/lua/gp/init.lua b/lua/gp/init.lua
index 0e76736..1f51298 100644
--- a/lua/gp/init.lua
+++ b/lua/gp/init.lua
@@ -18,6 +18,16 @@ local deprecated = {
 	command_prompt_prefix = "`command_prompt_prefix`\nPlease use `command_prompt_prefix_template`"
 		.. " with support for \n`{{agent}}` variable so you know which agent is currently active",
 	whisper_max_time = "`whisper_max_time`\nPlease use fully customizable `whisper_rec_cmd`",
+
+	openai_api_endpoint = "`openai_api_endpoint`\n\n"
+		.. "If you're using the `https://api.openai.com/v1/chat/completions` endpoint,\n"
+		.. "just drop `openai_api_endpoint` in your config and you're done."
+		.. "\n\nOtherwise sorry for probably breaking your setup, "
+		.. "please use `endpoint` and `secret` fields in:\n\nproviders "
+		.. "= {\n openai = {\n endpoint = '...',\n secret = '...'\n },"
+		.. "\n -- azure = {...},\n -- copilot = {...},\n -- ollama = {...},\n},\n"
+		.. "\nThe `openai_api_key` is still supported for backwards compatibility,\n"
+		.. "and automatically converted to `providers.openai.secret` if the new config is not set.",
 }

 --------------------------------------------------------------------------------
@@ -739,8 +749,16 @@ M.setup = function(opts)

 		opts[tbl] = opts[tbl] or {}
 		for k, v in pairs(opts[tbl]) do
-			if tbl == "hooks" or tbl == "providers" then
+			if tbl == "hooks" then
 				M[tbl][k] = v
+			elseif tbl == "providers" then
+				M[tbl][k] = M[tbl][k] or {}
+				for pk, pv in pairs(v) do
+					M[tbl][k][pk] = pv
+				end
+				if next(v) == nil then
+					M[tbl][k] = nil
+				end
 			elseif tbl == "agents" or tbl == "image_agents" then
 				M[tbl][v.name] = v
 			end
@@ -814,11 +832,19 @@ M.setup = function(opts)
 	M._chat_agents = {}
 	M._command_agents = {}
 	for name, agent in pairs(M.agents) do
-		if agent.command then
-			table.insert(M._command_agents, name)
+		if not M.agents[name].provider then
+			M.agents[name].provider = "openai"
 		end
-		if agent.chat then
-			table.insert(M._chat_agents, name)
+
+		if M.providers[M.agents[name].provider] then
+			if agent.command then
+				table.insert(M._command_agents, name)
+			end
+			if agent.chat then
+				table.insert(M._chat_agents, name)
+			end
+		else
+			M.agents[name] = nil
 		end
 	end
 	table.sort(M._chat_agents)
@@ -888,13 +914,16 @@ M.setup = function(opts)
 	for name, _ in pairs(M.providers) do
 		M.resolve_secret(name)
 	end
-
-	-- M.resolve_secret(M.config, "openai_api_key")
-	-- M.valid_api_key()
+	if not M.providers.openai then
+		M.providers.openai = {}
+		M.resolve_secret("openai", function()
+			M.providers.openai = nil
+		end)
+	end
 end

 ---@provider string # provider name
-function M.resolve_secret(provider)
+function M.resolve_secret(provider, callback)
 	local post_process = function()
 		local p = M.providers[provider]
 		if p.secret and type(p.secret) == "string" then
@@ -904,6 +933,20 @@
 		if provider == "copilot" then
 			M.refresh_copilot_bearer()
 		end
+
+		-- backwards compatibility
+		if provider == "openai" then
+			M.config.openai_api_key = M.providers[provider].secret
+		end
+
+		if callback then
+			callback()
+		end
+	end
+
+	-- backwards compatibility
+	if provider == "openai" then
+		M.providers[provider].secret = M.providers[provider].secret or M.config.openai_api_key
 	end

 	local secret = M.providers[provider].secret
@@ -951,7 +994,7 @@
 	end
 end

---TODO: obsolete
+-- TODO: obsolete
 M.valid_api_key = function()
 	local api_key = M.config.openai_api_key

@@ -1262,19 +1305,40 @@ M.query = function(buf, provider, payload, handler, on_exit)
 		end
 	end

-	-- try to replace model in endpoint (for azure)
-	local endpoint = M._H.template_replace(M.providers[provider].endpoint, "{{model}}", payload.model)
+	---TODO: this could be moved to a separate function returning endpoint and headers
+	local endpoint = M.providers[provider].endpoint
 	local bearer = M.providers[provider].secret
 	local headers = {}
+
 	if provider == "copilot" and M._state.copilot_bearer then
 		---@diagnostic disable-next-line: undefined-field
 		bearer = M._state.copilot_bearer.token or ""
 		headers = {
 			"-H",
 			"editor-version: vscode/1.85.1",
+			"-H",
+			"Authorization: Bearer " .. bearer,
 		}
 	end

+	if provider == "openai" then
+		headers = {
+			"-H",
+			"Authorization: Bearer " .. bearer,
+			-- backwards compatibility
+			"-H",
+			"api-key: " .. bearer,
+		}
+	end
+
+	if provider == "azure" then
+		headers = {
+			"-H",
+			"api-key: " .. bearer,
+		}
+		endpoint = M._H.template_replace(endpoint, "{{model}}", payload.model)
+	end
+
 	local curl_params = vim.deepcopy(M.config.curl_params or {})
 	local args = {
 		"--no-buffer",
@@ -1282,11 +1346,6 @@ M.query = function(buf, provider, payload, handler, on_exit)
 		endpoint,
 		"-H",
 		"Content-Type: application/json",
-		-- api-key is for azure, authorization is for openai
-		"-H",
-		"Authorization: Bearer " .. bearer,
-		"-H",
-		"api-key: " .. M.config.openai_api_key,
 		"-d",
 		vim.json.encode(payload),
 		--[[ "--doesnt_exist" ]]
@@ -2523,20 +2582,24 @@ M.cmd.NextAgent = function()
 		agent_list = M._command_agents
 	end

+	local set_agent = function(agent_name)
+		if is_chat then
+			M._state.chat_agent = agent_name
+			M.info("Chat agent: " .. agent_name)
+		else
+			M._state.command_agent = agent_name
+			M.info("Command agent: " .. agent_name)
+		end
+		M.refresh_state()
+	end
+
 	for i, agent_name in ipairs(agent_list) do
 		if agent_name == current_agent then
-			local next_agent = agent_list[i % #agent_list + 1]
-			if is_chat then
-				M._state.chat_agent = next_agent
-				M.info("Chat agent: " .. next_agent)
-			else
-				M._state.command_agent = next_agent
-				M.info("Command agent: " .. next_agent)
-			end
-			M.refresh_state()
+			set_agent(agent_list[i % #agent_list + 1])
 			return
 		end
 	end
+	set_agent(agent_list[1])
 end

 ---@return table # { cmd_prefix, name, model, system_prompt }
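The hunks above replace the single `openai_api_endpoint`/`openai_api_key` pair with a per-provider `providers` table (`endpoint` plus `secret`), default each agent's `provider` to `"openai"`, drop agents whose provider ends up unconfigured, and build provider-specific auth headers inside `M.query`. As a rough illustration only, the sketch below shows how a user-side `setup()` call might look against this scheme. It sticks to fields visible in the diff (`endpoint`, `secret`, the empty-table disable convention, the optional per-agent `provider`); the agent name, the Azure secret variable, and the choice of providers are placeholders, not taken from the patch.

```lua
-- Hypothetical user config for the providers-based setup introduced above.
-- Field names come from the diff; concrete values are placeholders.
require("gp").setup({
	providers = {
		openai = {
			endpoint = "https://api.openai.com/v1/chat/completions",
			secret = os.getenv("OPENAI_API_KEY"),
		},
		azure = {
			-- {{model}} is substituted with payload.model in M.query before the curl call
			endpoint = "https://$URL.openai.azure.com/openai/deployments/{{model}}/chat/completions",
			secret = os.getenv("AZURE_OPENAI_API_KEY"), -- assumed env var name
		},
		-- disabling a provider: pass an empty table (removed by the setup() merge logic above)
		copilot = {},
	},
	agents = {
		{
			name = "AzureChat", -- hypothetical agent
			provider = "azure", -- omitting provider falls back to "openai"
			chat = true,
			command = false,
			model = { model = "gpt-4-turbo-preview", temperature = 1.1, top_p = 1 },
			system_prompt = "You are a general AI assistant.",
		},
	},
})
```

Note that with the new agent filtering in `M.setup`, an agent whose `provider` is missing from `M.providers` (misspelled or disabled via an empty table) is removed from `M.agents` instead of raising an error, so it simply stops showing up in the chat and command agent lists.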