Merge branch 'master' of gitea.suda.codes:sudacode/nvim

commit cc288092b8
Author: sudacode
Date: 2025-04-10 21:58:03 -07:00
Signed by: sudacode
SSH Key Fingerprint: SHA256:lT5C2bB398DcX6daCF/gYFNSTK3y+Du3oTGUnYzfTEw

@@ -15,18 +15,20 @@ return {
                 stream = true,
               },
               model = {
-                default = "claude-3.7-sonnet-thought",
+                -- default = "claude-3.7-sonnet-thought",
+                -- default = "o3-mini",
+                default = "gemini-2.0-flash-001",
                 -- default = "o3-mini-2025-01-31",
-                choices = {
-                  ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
-                  ["o1-2024-12-17"] = { opts = { can_reason = true } },
-                  ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
-                  "gpt-4o-2024-08-06",
-                  "claude-3.7-sonnet-thought",
-                  "claude-3.7-sonnet",
-                  "claude-3.5-sonnet",
-                  "gemini-2.0-flash-001",
-                },
+                -- choices = {
+                  -- ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
+                  -- ["o1-2024-12-17"] = { opts = { can_reason = true } },
+                  -- ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
+                  -- "gpt-4o-2024-08-06",
+                  -- "claude-3.7-sonnet-thought",
+                  -- "claude-3.7-sonnet",
+                  -- "claude-3.5-sonnet",
+                  -- "gemini-2.0-flash-001",
+                -- },
               },
               -- max_tokens = {
                 -- default = 65536,
@@ -42,18 +44,18 @@ return {
               stream = false,
             },
             schema = {
-              model = {
-                default = "qwen2.5-coder-14b-instruct",
-                choices = {
-                  ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
-                  ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
-                    opts = { can_reason = true },
-                  },
-                  ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
-                    opts = { can_reason = true },
-                  },
-                },
-              },
+              -- model = {
+                -- default = "qwen2.5-coder-14b-instruct",
+                -- choices = {
+                  -- ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
+                  -- ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
+                    -- opts = { can_reason = true },
+                  -- },
+                  -- ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
+                    -- opts = { can_reason = true },
+                  -- },
+                -- },
+              -- },
               temperature = {
                 order = 2,
                 mapping = "parameters",
@@ -66,18 +68,45 @@ return {
               },
             },
             env = {
-              url = "http://localhost:8888",
+              url = "http://localhost:8080",
               chat_url = "/v1/chat/completions",
             },
           })
         end,
+        openrouter = function()
+          return require("codecompanion.adapters").extend("openai_compatible", {
+            env = {
+              url = "https://openrouter.ai/api",
+              api_key = "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",
+              chat_url = "/v1/chat/completions",
+            },
+            schema = {
+              model = {
+                default = "google/gemini-2.5-pro-exp-03-25:free",
+                -- default = "deepseek/deepseek-chat-v3-0324:free",
+                -- default = "google/gemini-2.0-flash-thinking-exp:free",
+                -- default = "deepseek/deepseek-r1-distill-qwen-32b:free",
+                -- default = "qwen/qwen-2.5-coder-32b-instruct:free",
+              },
+            },
+          })
+        end,
       },
       strategies = {
        chat = {
-          adapter = "copilot",
+          -- adapter = "copilot",
+          adapter = "openrouter",
          roles = {
            llm = function(adapter)
-              return " Assistant (" .. adapter.formatted_name .. " - " .. adapter.parameters.model .. ")"
+              if adapter.model == nil then
+                return " Assistant"
+              else
+                return " Assistant ("
+                  .. adapter.formatted_name
+                  .. " - "
+                  .. adapter.parameters.model
+                  .. ")"
+              end
            end,
            user = " User",
          },
@@ -105,7 +134,8 @@ return {
          },
        },
        inline = {
-          adapter = "copilot",
+          -- adapter = "copilot",
+          adapter = "openrouter",
        },
      },
      display = {