Skip to content

Commit

Permalink
feat: add lsp-ai
Browse files Browse the repository at this point in the history
  • Loading branch information
fredrikaverpil committed Jun 17, 2024
1 parent 6c1ee57 commit 094ae78
Show file tree
Hide file tree
Showing 2 changed files with 147 additions and 3 deletions.
4 changes: 1 addition & 3 deletions nvim-fredrik/lazy-lock.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
{
"ChatGPT.nvim": { "branch": "main", "commit": "df53728e05129278d6ea26271ec086aa013bed90" },
"CopilotChat.nvim": { "branch": "canary", "commit": "82923efe22b604cf9c0cad0bb2a74aa9247755ab" },
"FTerm.nvim": { "branch": "master", "commit": "d1320892cc2ebab472935242d9d992a2c9570180" },
"FixCursorHold.nvim": { "branch": "master", "commit": "1900f89dc17c603eec29960f57c00bd9ae696495" },
"LuaSnip": { "branch": "master", "commit": "50fcf17db7c75af80e6b6109acfbfb4504768780" },
Expand All @@ -16,7 +15,6 @@
"cmp_luasnip": { "branch": "master", "commit": "05a9ab28b53f71d1aece421ef32fee2cb857a843" },
"code_runner.nvim": { "branch": "main", "commit": "6c5bfe44a6c7523350cd706e6b3b8101166eed99" },
"conform.nvim": { "branch": "master", "commit": "797d1f622a23d4a21bb58218bdf5999a9beac4ef" },
"copilot.lua": { "branch": "master", "commit": "86537b286f18783f8b67bccd78a4ef4345679625" },
"crates.nvim": { "branch": "main", "commit": "8437522d12a8c523da2aee9db2979d070b2ecc33" },
"diffview.nvim": { "branch": "main", "commit": "4516612fe98ff56ae0415a259ff6361a89419b0a" },
"dressing.nvim": { "branch": "master", "commit": "e3714c8049b2243e792492c4149e4cc395c68eb9" },
Expand Down Expand Up @@ -57,7 +55,6 @@
"nvim-lspconfig": { "branch": "master", "commit": "bd7c76375a511994c9ca8d69441f134dc10ae3bd" },
"nvim-nio": { "branch": "master", "commit": "7969e0a8ffabdf210edd7978ec954a47a737bbcc" },
"nvim-notify": { "branch": "master", "commit": "d333b6f167900f6d9d42a59005d82919830626bf" },
"nvim-spectre": { "branch": "master", "commit": "ec67d4b5370094b923dfcf6b09b39142f2964861" },
"nvim-treesitter": { "branch": "master", "commit": "4d112977c706034370a7fed65f7c957ec268eaa2" },
"nvim-treesitter-context": { "branch": "master", "commit": "5efba33af0f39942e426340da7bc15d7dec16474" },
"nvim-ufo": { "branch": "main", "commit": "aa2e676af592b4e99c105d80d6eafd1afc215d99" },
Expand Down Expand Up @@ -90,6 +87,7 @@
"which-key.nvim": { "branch": "main", "commit": "0099511294f16b81c696004fa6a403b0ae61f7a0" },
"window-picker": { "branch": "main", "commit": "41cfaa428577c53552200a404ae9b3a0b5719706" },
"winshift.nvim": { "branch": "main", "commit": "37468ed6f385dfb50402368669766504c0e15583" },
"workspace-diagnostics.nvim": { "branch": "main", "commit": "0f0dad6cc3fbc2c9c2d850eb7611917cf66d4595" },
"yanky.nvim": { "branch": "main", "commit": "73215b77d22ebb179cef98e7e1235825431d10e4" },
"zen-mode.nvim": { "branch": "main", "commit": "78557d972b4bfbb7488e17b5703d25164ae64e6a" }
}
146 changes: 146 additions & 0 deletions nvim-fredrik/lua/plugins/lsp-ai.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
-- NOTE: instructions:
--
-- For local llama_cpp:
-- - https://github.com/SilasMarvin/lsp-ai/wiki/Installation
-- Then download model file, e.g. https://huggingface.co/Qwen/CodeQwen1.5-7B-Chat-GGUF/tree/main
-- and place it somewhere. Then update the `file_path` below.
--
-- See additional configs and possibilities:
-- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration

--- Ollama FIM.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#fim-2
--- Build an lsp-ai server config for Ollama fill-in-the-middle (FIM) completion.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#fim-2
--- @return table lsp-ai server settings (memory, models, completion).
local function fim_ollama()
  return {
    memory = {
      file_store = {},
    },
    models = {
      model1 = {
        type = "ollama",
        model = "codegemma",
      },
    },
    completion = {
      model = "model1",
      parameters = {
        fim = {
          start = "<|fim▁begin|>",
          middle = "<|fim▁hole|>",
          -- "end" is a Lua keyword, so it must be a bracketed string key;
          -- this replaces the original post-construction assignment workaround.
          ["end"] = "<|fim▁end|>",
        },
        max_context = 1024 * 2,
        options = {
          num_predict = 32,
        },
      },
    },
  }
end

--- Llama_cpp FIM.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#fim-1
--- Build an lsp-ai server config for llama_cpp fill-in-the-middle (FIM) completion.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#fim-1
--- NOTE(review): file_path points at a locally downloaded GGUF model; adjust to
--- wherever the model was placed (see the install notes at the top of this file).
--- @return table lsp-ai server settings (memory, models, completion).
local function fim_llama_cpp()
  return {
    memory = {
      file_store = {},
    },
    models = {
      model1 = {
        type = "llama_cpp",
        file_path = vim.fn.expand("~/code/public/CodeQwen1.5-7B-Chat-GGUF/codeqwen-1_5-7b-chat-q4_k_m.gguf"),
        n_ctx = 1024 * 2,
        n_gpu_layers = 500,
      },
    },
    completion = {
      model = "model1",
      parameters = {
        fim = {
          start = "<|fim▁prefix|>",
          middle = "<|fim▁suffix|>",
          -- "end" is a Lua keyword, so it must be a bracketed string key;
          -- this replaces the original post-construction assignment workaround.
          ["end"] = "<|fim▁middle|>",
        },
        max_context = 1024 * 2,
        options = {
          num_predict = 32,
        },
      },
    },
  }
end

-- Ollama completion.
-- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#completion-2
--- Build an lsp-ai server config for plain (non-FIM) completion via Ollama.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#completion-2
--- @return table lsp-ai server settings (memory, models, completion).
local function completion_ollama()
  local context_window = 1024 * 2
  local ollama_model = {
    type = "ollama",
    model = "codegemma",
  }
  return {
    memory = { file_store = {} },
    models = { model1 = ollama_model },
    completion = {
      model = "model1",
      parameters = {
        max_context = context_window,
        options = { num_predict = 32 },
      },
    },
  }
end

-- Llama_cpp completion.
-- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#completion-1
--- Build an lsp-ai server config for plain (non-FIM) completion via llama_cpp.
--- https://github.com/SilasMarvin/lsp-ai/wiki/Configuration#completion-1
--- NOTE(review): file_path points at a locally downloaded GGUF model; adjust to
--- wherever the model was placed (see the install notes at the top of this file).
--- @return table lsp-ai server settings (memory, models, completion).
local function completion_llama_cpp()
  local context_window = 1024 * 2
  local model_file = vim.fn.expand("~/code/public/CodeQwen1.5-7B-Chat-GGUF/codeqwen-1_5-7b-chat-q4_k_m.gguf")
  return {
    memory = { file_store = {} },
    models = {
      model1 = {
        type = "llama_cpp",
        file_path = model_file,
        n_ctx = context_window,
        n_gpu_layers = 500,
      },
    },
    completion = {
      model = "model1",
      parameters = {
        max_context = context_window,
        max_tokens = 32,
      },
    },
  }
end

-- lazy.nvim plugin spec for lsp-ai.
-- Currently disabled (enabled = false); flip to true to activate.
-- Swap the `server` value below to pick one of the configs defined above.
return {
  {
    -- Invoke with `:LSPAIComplete`
    "SuperBo/lsp-ai.nvim",
    enabled = false,
    dependencies = { "neovim/nvim-lspconfig" },
    opts = {
      autostart = true,
      -- Alternative backends:
      -- server = fim_ollama(),
      -- server = fim_llama_cpp(),
      -- server = completion_ollama(),
      server = completion_llama_cpp(),
    },
    config = function(_, opts)
      require("lsp_ai").setup(opts)
    end,
  },
}

0 comments on commit 094ae78

Please sign in to comment.