-- nvim/lua/plugins/codecompanion.lua
return {
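-- Plugin spec (lazy.nvim-style): adapters, chat/inline strategies and display tweaks for codecompanion.nvim.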
"olimorris/codecompanion.nvim",
dependencies = {
"nvim-lua/plenary.nvim",
"nvim-treesitter/nvim-treesitter",
"j-hui/fidget.nvim",
},
opts = {
adapters = {
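-- Copilot adapter: streams responses, defaults to claude-3.7-sonnet-thought and flags which model choices can reason.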
copilot = function()
return require("codecompanion.adapters").extend("copilot", {
name = "copilot",
opts = {
stream = true,
},
schema = {
model = {
default = "claude-3.7-sonnet-thought",
-- default = "o3-mini-2025-01-31",
choices = {
["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
["o1-2024-12-17"] = { opts = { can_reason = true } },
["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
"claude-3.5-sonnet",
"claude-3.7-sonnet",
["claude-3.7-sonnet-thought"] = {
opts = { can_reason = true, thinking = { type = "inline" } },
},
"gpt-4o-2024-08-06",
"gemini-2.0-flash-001",
},
},
-- max_tokens = {
-- default = 65536,
-- },
},
})
end,
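-- Local llama.cpp adapter: reuses the generic openai_compatible adapter and assumes an
-- OpenAI-compatible server listening on http://localhost:8888 (see env below); model
-- choices map to local GGUF files.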
llama_cpp = function()
return require("codecompanion.adapters").extend("openai_compatible", {
name = "llama.cpp",
formatted_name = "llama.cpp",
opts = {
stream = false,
},
schema = {
model = {
default = "qwen2.5-coder-14b-instruct",
choices = {
["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
opts = { can_reason = true },
},
["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
opts = { can_reason = true },
},
},
},
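-- Expose temperature as a tunable parameter; the low default keeps local completions fairly deterministic.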
temperature = {
order = 2,
mapping = "parameters",
type = "number",
optional = true,
default = 0.2,
validate = function(n)
return n >= 0 and n <= 2, "Must be between 0 and 2"
end,
},
},
env = {
url = "http://localhost:8888",
chat_url = "/v1/chat/completions",
},
})
end,
},
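-- Strategies: which adapter backs each interaction mode (chat buffer and inline assistant).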
strategies = {
chat = {
adapter = "copilot",
roles = {
llm = function(adapter)
return " Assistant (" .. adapter.formatted_name .. " - " .. adapter.parameters.model .. ")"
end,
user = " User",
},
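-- Use telescope as the picker provider for these slash commands.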
slash_commands = {
["file"] = {
opts = {
provider = "telescope",
},
},
["symbols"] = {
opts = {
provider = "telescope",
},
},
["buffer"] = {
opts = {
provider = "telescope",
},
},
["terminal"] = {
opts = {
provider = "telescope",
},
},
},
},
inline = {
adapter = "copilot",
},
},
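-- Display: action palette, chat buffer UI and diff handling.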
display = {
action_palette = {
provider = "telescope",
width = 75,
height = 45,
},
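-- Chat buffer UI; window geometry and buffer-local options live under chat.window.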
chat = {
intro_message = "Welcome to CodeCompanion ✨! Press ? for options",
show_header_separator = false, -- Show header separators in the chat buffer? Set this to false if you're using an external markdown formatting plugin
separator = "", -- The separator between the different messages in the chat buffer
show_references = true, -- Show references (from slash commands and variables) in the chat buffer?
show_settings = false, -- Show LLM settings at the top of the chat buffer?
show_token_count = true, -- Show the token count for each response?
start_in_insert_mode = false, -- Open the chat buffer in insert mode?
window = {
layout = "vertical",
position = nil,
border = "rounded",
height = 0.45,
width = 0.45,
relative = "editor",
opts = {
breakindent = true,
cursorcolumn = false,
cursorline = false,
foldcolumn = "0",
linebreak = true,
list = false,
numberwidth = 1,
signcolumn = "no",
spell = false,
wrap = true,
},
},
---Customize how tokens are displayed
---@param tokens number
---@param adapter CodeCompanion.Adapter
---@return string
token_count = function(tokens, adapter)
return " (" .. tokens .. " tokens)"
end,
},
diff = {
enabled = true,
provider = "mini_diff",
},
},
opts = {
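-- Verbose logging while iterating on this setup; drop back to a quieter level once stable.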
log_level = "DEBUG",
-- log_level = "TRACE",
},
},
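-- Start the local fidget-spinner helper (plugins/codecompanion/fidget-spinner.lua), which is
-- assumed to surface CodeCompanion request progress via fidget.nvim.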
init = function()
require("plugins.codecompanion.fidget-spinner"):init()
end,
}