update nvim

kyasuda
2025-09-05 10:32:09 -07:00
parent 22b43c6ddf
commit b8b8d6ef09
16 changed files with 618 additions and 248 deletions


@@ -8,98 +8,121 @@ return {
   },
   opts = {
     adapters = {
-      copilot = function()
-        return require("codecompanion.adapters").extend("copilot", {
-          schema = {
-            name = "copilot",
-            opts = {
-              stream = true,
-              tools = true,
-              vision = true,
-            },
-            features = {
-              text = true,
-              tokens = true,
-            },
-            model = {
-              -- default = "claude-3.7-sonnet-thought",
-              -- default = "o3-mini",
-              -- default = "gemini-2.0-flash-001",
-              default = "gpt-4.1",
-              -- default = "gpt-4o",
-              -- default = "o3-mini-2025-01-31",
-              -- choices = {
-              --   ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
-              --   ["o1-2024-12-17"] = { opts = { can_reason = true } },
-              --   ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
-              --   "gpt-4o-2024-08-06",
-              --   "claude-3.7-sonnet-thought",
-              --   "claude-3.7-sonnet",
-              --   "claude-3.5-sonnet",
-              --   "gemini-2.0-flash-001",
-              -- },
-            },
-            -- max_tokens = {
-            --   default = 65536,
-            -- },
-          },
-        })
-      end,
-      llama_cpp = function()
-        return require("codecompanion.adapters").extend("openai_compatible", {
-          name = "llama.cpp",
-          formatted_name = "llama.cpp",
-          opts = {
-            stream = false,
-          },
-          schema = {
-            -- model = {
-            --   default = "qwen2.5-coder-14b-instruct",
-            --   choices = {
-            --     ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
-            --     ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
-            --       opts = { can_reason = true },
-            --     },
-            --     ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
-            --       opts = { can_reason = true },
-            --     },
-            --   },
-            -- },
-            temperature = {
-              order = 2,
-              mapping = "parameters",
-              type = "number",
-              optional = true,
-              default = 0.2,
-              validate = function(n)
-                return n >= 0 and n <= 2, "Must be between 0 and 2"
-              end,
-            },
-          },
-          env = {
-            url = "http://localhost:8080",
-            chat_url = "/v1/chat/completions",
-          },
-        })
-      end,
-      openrouter = function()
-        return require("codecompanion.adapters").extend("openai_compatible", {
-          env = {
-            url = "https://openrouter.ai/api",
-            api_key = "cmd:cat $HOME/.openrouterapikey",
-            chat_url = "/v1/chat/completions",
-          },
-          schema = {
-            model = {
-              default = "google/gemini-2.5-pro-exp-03-25:free",
-              -- default = "deepseek/deepseek-chat-v3-0324:free",
-              -- default = "google/gemini-2.0-flash-thinking-exp:free",
-              -- default = "deepseek/deepseek-r1-distill-qwen-32b:free",
-              -- default = "qwen/qwen-2.5-coder-32b-instruct:free",
-            },
-          },
-        })
-      end,
+      -- {{{ HTTP
+      http = {
+        -- {{{ COPILOT
+        copilot = function()
+          return require("codecompanion.adapters").extend("copilot", {
+            schema = {
+              name = "copilot",
+              opts = {
+                stream = true,
+                tools = true,
+                vision = true,
+              },
+              features = {
+                text = true,
+                tokens = true,
+              },
+              model = {
+                -- default = "claude-3.7-sonnet-thought",
+                -- default = "o3-mini",
+                -- default = "gemini-2.0-flash-001",
+                default = "gpt-4.1",
+                -- default = "gpt-4o",
+                -- default = "o3-mini-2025-01-31",
+                -- choices = {
+                --   ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
+                --   ["o1-2024-12-17"] = { opts = { can_reason = true } },
+                --   ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
+                --   "gpt-4o-2024-08-06",
+                --   "claude-3.7-sonnet-thought",
+                --   "claude-3.7-sonnet",
+                --   "claude-3.5-sonnet",
+                --   "gemini-2.0-flash-001",
+                -- },
+              },
+              -- max_tokens = {
+              --   default = 65536,
+              -- },
+            },
+          })
+        end,
+        -- }}}
+        -- {{{ LLAMA_CPP
+        llama_cpp = function()
+          return require("codecompanion.adapters").extend("openai_compatible", {
+            name = "llama.cpp",
+            formatted_name = "llama.cpp",
+            opts = {
+              stream = false,
+            },
+            schema = {
+              -- model = {
+              --   default = "qwen2.5-coder-14b-instruct",
+              --   choices = {
+              --     ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
+              --     ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
+              --       opts = { can_reason = true },
+              --     },
+              --     ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
+              --       opts = { can_reason = true },
+              --     },
+              --   },
+              -- },
+              temperature = {
+                order = 2,
+                mapping = "parameters",
+                type = "number",
+                optional = true,
+                default = 0.2,
+                validate = function(n)
+                  return n >= 0 and n <= 2, "Must be between 0 and 2"
+                end,
+              },
+            },
+            env = {
+              url = "http://localhost:8080",
+              chat_url = "/v1/chat/completions",
+            },
+          })
+        end,
+        -- }}}
+        -- {{{ OPENROUTER
+        openrouter = function()
+          return require("codecompanion.adapters").extend("openai_compatible", {
+            env = {
+              url = "https://openrouter.ai/api",
+              api_key = "cmd:cat $HOME/.openrouterapikey",
+              chat_url = "/v1/chat/completions",
+            },
+            schema = {
+              model = {
+                default = "google/gemini-2.5-pro-exp-03-25:free",
+                -- default = "deepseek/deepseek-chat-v3-0324:free",
+                -- default = "google/gemini-2.0-flash-thinking-exp:free",
+                -- default = "deepseek/deepseek-r1-distill-qwen-32b:free",
+                -- default = "qwen/qwen-2.5-coder-32b-instruct:free",
+              },
+            },
+          })
+        end,
+        -- }}}
+      },
+      -- }}}
+      -- {{{ ACP
+      acp = {
+        gemini_cli = function()
+          return require("codecompanion.adapters").extend("gemini_cli", {
+            defaults = {
+              auth_method = "oauth-personal", -- "oauth-personal"|"gemini-api-key"|"vertex-ai"
+              mcpServers = {},
+              timeout = 20000, -- 20 seconds
+            },
+          })
+        end,
+      },
+      -- }}}
     },
     strategies = {
       chat = {
@@ -212,6 +235,7 @@ return {
     },
   },
   init = function()
-    require("plugins.codecompanion.fidget-spinner"):init()
+    require("utils.codecompanion.fidget-spinner"):init()
+    require("utils.codecompanion.extmarks").setup()
   end,
 }