Commit

chore: wip backup + lmstudio test
Robitx committed Jan 31, 2024
1 parent fc0b9cb commit 5a1916e
Showing 2 changed files with 94 additions and 16 deletions.
57 changes: 55 additions & 2 deletions lua/gp/config.lua
@@ -43,8 +43,11 @@ local config = {
},
},
ollama = {
-- endpoint = "http://localhost:8000/v1/chat/completions",
endpoint = "http://localhost:11434/api/chat",
},
lmstudio = {
endpoint = "http://localhost:1234/v1/chat/completions",
},
},

-- prefix for all commands
@@ -115,6 +118,38 @@ local config = {
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "ollama",
name = "ChatOllama",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 1.97,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
{
provider = "lmsudio",
name = "ChatLMStudio",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = {
model = "dummy",
temperature = 0.97,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
{
provider = "openai",
name = "CodeGPT4",
@@ -151,6 +186,25 @@
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
{
provider = "ollama",
name = "CodeOllamaDeepSeek",
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 1.9,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are an AI working as a code editor providing answers.\n\n"
.. "Use 4 SPACES FOR INDENTATION.\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
},

-- directory for storing chat files
@@ -166,7 +220,6 @@ local config = {
chat_topic_gen_prompt = "Summarize the topic of our conversation above"
.. " in two or three words. Respond only with those words.",
-- chat topic model (string with model name or table with model name and parameters)
chat_topic_gen_model = "gpt-3.5-turbo-16k",
-- explicitly confirm deletion of a chat file
chat_confirm_delete = true,
-- conceal model parameters in chat
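
A note on the model field used by the agents above: as the inline comments say, it accepts either a plain string (model name only) or a table (name plus parameters). A minimal sketch of the two equivalent forms, with values taken from the defaults added in this commit (illustrative only):

-- string form: model name only, provider defaults apply
model = "mistral:7b-instruct-v0.2-q4_K_M",

-- table form: model name plus sampling and context parameters
model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 0.97,
top_p = 1,
num_ctx = 8192, -- ollama context window size
min_p = 0.05,
},
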
53 changes: 39 additions & 14 deletions lua/gp/init.lua
@@ -1142,10 +1142,10 @@ end
---@param messages table
---@param model string | table | nil
---@param default_model string | table
M.prepare_payload = function(messages, model, default_model)
---@param provider string | nil
M.prepare_payload = function(messages, model, default_model, provider)
model = model or default_model

-- if model is a string
if type(model) == "string" then
return {
model = model,
@@ -1154,7 +1154,23 @@ M.prepare_payload = function(messages, model, default_model)
}
end

-- if model is a table
if provider == "ollama" then
local options = {}
for k, v in pairs(model) do
if k ~= "provider" and k ~= "model" then
options[k] = v
end
end
options.temperature = math.max(0, math.min(2, options.temperature or 1))
options.top_p = math.max(0, math.min(1, options.top_p or 1))
return {
model = model.model,
stream = true,
messages = messages,
options = options,
}
end

return {
model = model.model,
stream = true,
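
For reference, a sketch of the payload the new ollama branch builds from the ChatOllama model table above (temperature is clamped to [0, 2] and top_p to [0, 1]; the model key is excluded from options):

-- input: the agent's model table
local model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 1.97,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
}
-- resulting payload:
-- {
--   model = "mistral:7b-instruct-v0.2-q4_K_M",
--   stream = true,
--   messages = messages,
--   options = { temperature = 1.97, top_p = 1, num_ctx = 8192, min_p = 0.05 },
-- }
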
@@ -1198,7 +1214,7 @@ end
-- gpt query
---@param buf number | nil # buffer number
---@param provider string # provider name
---@param payload table # payload for openai api
---@param payload table # payload for api
---@param handler function # response handler
---@param on_exit function | nil # optional on_exit handler
M.query = function(buf, provider, payload, handler, on_exit)
@@ -1248,16 +1264,25 @@ M.query = function(buf, provider, payload, handler, on_exit)
qt.raw_response = qt.raw_response .. line .. "\n"
end
line = line:gsub("^data: ", "")
local content = ""
if line:match("choices") and line:match("delta") and line:match("content") then
line = vim.json.decode(line)
if line.choices[1] and line.choices[1].delta and line.choices[1].delta.content then
local content = line.choices[1].delta.content
if content and type(content) == "string" then
qt.response = qt.response .. content
handler(qid, content)
end
content = line.choices[1].delta.content
end
end

if provider == "ollama" and line:match("message") and line:match("content") then
line = vim.json.decode(line)
if line.message and line.message.content then
content = line.message.content
end
end

if content and type(content) == "string" then
qt.response = qt.response .. content
handler(qid, content)
end
end
end
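
For context, the two streaming line shapes this handler distinguishes, sketched in Lua (abridged example payloads; the second assumes ollama's /api/chat newline-delimited JSON stream):

-- OpenAI-style chunk, after the "data: " prefix is stripped:
-- {"choices":[{"delta":{"content":"Hi"}}]}
-- ollama /api/chat chunk, one JSON object per line:
-- {"message":{"role":"assistant","content":"Hi"},"done":false}
local line = '{"message":{"role":"assistant","content":"Hi"},"done":false}'
local decoded = vim.json.decode(line)
assert(decoded.message and decoded.message.content == "Hi")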

@@ -1269,7 +1294,7 @@ M.query = function(buf, provider, payload, handler, on_exit)
end

if err then
M.error("OpenAI query stdout error: " .. vim.inspect(err))
M.error(qt.provider .. " query stdout error: " .. vim.inspect(err))
elseif chunk then
-- add the incoming chunk to the buffer
buffer = buffer .. chunk
@@ -1289,7 +1314,7 @@
end

if qt.response == "" then
M.error("OpenAI query response is empty: \n" .. vim.inspect(qt.raw_response))
M.error(qt.provider .. " response is empty: \n" .. vim.inspect(qt.raw_response))
end

-- optional on_exit handler
Expand Down Expand Up @@ -2135,7 +2160,7 @@ M.chat_respond = function(params)
M.query(
buf,
agent.provider,
M.prepare_payload(messages, headers.model, agent.model),
M.prepare_payload(messages, headers.model, agent.model, agent.provider),
M.create_handler(buf, win, M._H.last_content_line(buf), true, "", not M.config.chat_free_cursor),
vim.schedule_wrap(function(qid)
local qt = M.get_query(qid)
@@ -2178,7 +2203,7 @@ M.chat_respond = function(params)
M.query(
nil,
agent.provider,
M.prepare_payload(messages, nil, M.config.chat_topic_gen_model),
M.prepare_payload(messages, nil, agent.model, agent.provider),
topic_handler,
vim.schedule_wrap(function()
-- get topic from invisible buffer
@@ -2947,7 +2972,7 @@ M.Prompt = function(params, target, prompt, model, template, system_template, wh
M.query(
buf,
provider,
M.prepare_payload(messages, model, agent.model),
M.prepare_payload(messages, model, agent.model, agent.provider),
handler,
vim.schedule_wrap(function(qid)
on_exit(qid)
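
Taken together, a hedged sketch of the updated call flow for a non-OpenAI agent (handler and on_exit stand in for the real callbacks built elsewhere in init.lua):

local agent = { provider = "ollama", model = { model = "mistral:7b-instruct-v0.2-q4_K_M" } }
local messages = { { role = "user", content = "Hello" } }
local payload = M.prepare_payload(messages, nil, agent.model, agent.provider)
M.query(nil, agent.provider, payload, handler, on_exit)
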
