Skip to content

Commit

Permalink
feat: support for perplexity and anthropic
Browse files Browse the repository at this point in the history
  • Loading branch information
Robitx committed Mar 24, 2024
1 parent 2777776 commit c130cf2
Show file tree
Hide file tree
Showing 2 changed files with 132 additions and 1 deletion.
77 changes: 77 additions & 0 deletions lua/gp/config.lua
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,14 @@ local config = {
endpoint = "https://generativelanguage.googleapis.com/v1beta/models/{{model}}:streamGenerateContent?key={{secret}}",
secret = os.getenv("GOOGLEAI_API_KEY"),
},
-- Perplexity AI: OpenAI-compatible chat completions endpoint.
-- API key is read from the PPLX_API_KEY environment variable.
pplx = {
endpoint = "https://api.perplexity.ai/chat/completions",
secret = os.getenv("PPLX_API_KEY"),
},
-- Anthropic Messages API (Claude models).
-- API key is read from the ANTHROPIC_API_KEY environment variable.
anthropic = {
endpoint = "https://api.anthropic.com/v1/messages",
secret = os.getenv("ANTHROPIC_API_KEY"),
},
},

-- prefix for all commands
Expand Down Expand Up @@ -140,6 +148,42 @@ local config = {
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
-- Chat agent backed by Perplexity's hosted mixtral-8x7b-instruct model.
{
provider = "pplx",
name = "ChatPerplexityMixtral",
-- available for chat sessions only, not for inline commands
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "mixtral-8x7b-instruct", temperature = 1.1, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
.. "- If you're unsure don't guess and say you don't know instead.\n"
.. "- Ask question if you need clarification to provide better answer.\n"
.. "- Think deeply and carefully from first principles step by step.\n"
.. "- Zoom out first to see the big picture and then zoom in to details.\n"
.. "- Use Socratic method to improve your thinking and coding skills.\n"
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
-- Chat agent backed by Anthropic's Claude 3 Haiku model.
{
provider = "anthropic",
name = "ChatClaude-3-Haiku",
-- available for chat sessions only, not for inline commands
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "claude-3-haiku-20240307", temperature = 0.8, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
.. "- If you're unsure don't guess and say you don't know instead.\n"
.. "- Ask question if you need clarification to provide better answer.\n"
.. "- Think deeply and carefully from first principles step by step.\n"
.. "- Zoom out first to see the big picture and then zoom in to details.\n"
.. "- Use Socratic method to improve your thinking and coding skills.\n"
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "ollama",
name = "ChatOllama",
Expand Down Expand Up @@ -204,6 +248,39 @@ local config = {
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
-- Command (inline code-editing) agent backed by Google's gemini-pro model.
{
provider = "googleai",
name = "CodeGemini",
-- available for inline commands only, not for chat sessions
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = { model = "gemini-pro", temperature = 0.8, top_p = 1 },
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
-- Command (inline code-editing) agent backed by Perplexity's mixtral-8x7b-instruct.
{
provider = "pplx",
name = "CodePerplexityMixtral",
-- available for inline commands only, not for chat sessions
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = { model = "mixtral-8x7b-instruct", temperature = 0.8, top_p = 1 },
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
-- Command (inline code-editing) agent backed by Anthropic's Claude 3 Haiku.
{
provider = "anthropic",
name = "CodeClaude-3-Haiku",
-- available for inline commands only, not for chat sessions
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = { model = "claude-3-haiku-20240307", temperature = 0.8, top_p = 1 },
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
{
provider = "ollama",
name = "CodeOllamaDeepSeek",
Expand Down
56 changes: 55 additions & 1 deletion lua/gp/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -1211,7 +1211,7 @@ M.prepare_payload = function(messages, model, default_model, provider)
},
generationConfig = {
temperature = math.max(0, math.min(2, model.temperature or 1)),
maxOutputTokens = model.num_ctx or 8192,
maxOutputTokens = model.max_tokens or 8192,
topP = math.max(0, math.min(1, model.top_p or 1)),
topK = model.top_k or 100,
},
Expand All @@ -1220,6 +1220,30 @@ M.prepare_payload = function(messages, model, default_model, provider)
return payload
end

-- Anthropic's Messages API takes the system prompt as a dedicated top-level
-- "system" field instead of a "system"-role message, so collect (and remove)
-- all system messages here before building the payload.
if provider == "anthropic" then
	local system = ""
	local i = 1
	-- Use <= so the LAST message is also inspected: with the previous
	-- `i < #messages` the final entry was never checked, leaking a trailing
	-- system-role message into `messages` (rejected by the API).
	while i <= #messages do
		if messages[i].role == "system" then
			system = system .. messages[i].content .. "\n"
			-- table.remove shifts later entries down, so do not advance i here
			table.remove(messages, i)
		else
			i = i + 1
		end
	end

	local payload = {
		model = model.model,
		stream = true,
		messages = messages,
		system = system,
		-- max_tokens is a required field for the Anthropic Messages API
		max_tokens = model.max_tokens or 4096,
		-- clamp sampling parameters to the ranges the API accepts
		temperature = math.max(0, math.min(2, model.temperature or 1)),
		top_p = math.max(0, math.min(1, model.top_p or 1)),
	}
	return payload
end

return {
model = model.model,
stream = true,
Expand Down Expand Up @@ -1321,6 +1345,18 @@ M.query = function(buf, provider, payload, handler, on_exit)
end
end

-- Anthropic streams server-sent events; text arrives in
-- content_block_start (first chunk) and content_block_delta (subsequent
-- chunks) events, extracted here into `content`.
-- NOTE(review): assumes `line` has already been reduced to a bare JSON
-- object by earlier handling in this reader (the SSE "data: " prefix is
-- not stripped here) -- confirm against the surrounding stream handler.
if qt.provider == "anthropic" and line:match('"text":') then
if line:match("content_block_start") or line:match("content_block_delta") then
-- `line` is deliberately rebound from the raw string to the decoded table
line = vim.json.decode(line)
if line.delta and line.delta.text then
content = line.delta.text
end
if line.content_block and line.content_block.text then
content = line.content_block.text
end
end
end

if qt.provider == "googleai" then
if line:match('"text":') then
content = vim.json.decode("{" .. line .. "}").text
Expand Down Expand Up @@ -1405,13 +1441,31 @@ M.query = function(buf, provider, payload, handler, on_exit)
}
end

-- Perplexity authenticates with a standard OpenAI-style bearer token
-- (curl -H flag/value pairs).
if provider == "pplx" then
headers = {
"-H",
"Authorization: Bearer " .. bearer,
}
end

-- GoogleAI authenticates via the API key embedded in the endpoint URL, so
-- no auth headers are needed; the model name is also part of the URL path,
-- hence it is substituted into the endpoint and dropped from the payload.
if provider == "googleai" then
headers = {}
endpoint = M._H.template_replace(endpoint, "{{secret}}", bearer)
endpoint = M._H.template_replace(endpoint, "{{model}}", payload.model)
payload.model = nil
end

-- Anthropic uses an x-api-key header (not Authorization: Bearer) plus a
-- mandatory anthropic-version header; the anthropic-beta header opts into
-- the Messages API, which was still gated behind a beta flag at this time.
if provider == "anthropic" then
headers = {
"-H",
"x-api-key: " .. bearer,
"-H",
"anthropic-version: 2023-06-01",
"-H",
"anthropic-beta: messages-2023-12-15",
}
end

if provider == "azure" then
headers = {
"-H",
Expand Down

0 comments on commit c130cf2

Please sign in to comment.