diff --git a/lua/plugins/codecompanion.lua b/lua/plugins/codecompanion.lua
index 1c8aec3..aee4679 100644
--- a/lua/plugins/codecompanion.lua
+++ b/lua/plugins/codecompanion.lua
@@ -15,18 +15,20 @@ return {
             stream = true,
           },
           model = {
-            default = "claude-3.7-sonnet-thought",
+            -- default = "claude-3.7-sonnet-thought",
+            -- default = "o3-mini",
+            default = "gemini-2.0-flash-001",
             -- default = "o3-mini-2025-01-31",
-            choices = {
-              ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
-              ["o1-2024-12-17"] = { opts = { can_reason = true } },
-              ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
-              "gpt-4o-2024-08-06",
-              "claude-3.7-sonnet-thought",
-              "claude-3.7-sonnet",
-              "claude-3.5-sonnet",
-              "gemini-2.0-flash-001",
-            },
+            -- choices = {
+            --   ["o3-mini-2025-01-31"] = { opts = { can_reason = true } },
+            --   ["o1-2024-12-17"] = { opts = { can_reason = true } },
+            --   ["o1-mini-2024-09-12"] = { opts = { can_reason = true } },
+            --   "gpt-4o-2024-08-06",
+            --   "claude-3.7-sonnet-thought",
+            --   "claude-3.7-sonnet",
+            --   "claude-3.5-sonnet",
+            --   "gemini-2.0-flash-001",
+            -- },
           },
           -- max_tokens = {
           --   default = 65536,
@@ -42,18 +44,18 @@ return {
             stream = false,
           },
           schema = {
-            model = {
-              default = "qwen2.5-coder-14b-instruct",
-              choices = {
-                ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
-                ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
-                  opts = { can_reason = true },
-                },
-                ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
-                  opts = { can_reason = true },
-                },
-              },
-            },
+            -- model = {
+            --   default = "qwen2.5-coder-14b-instruct",
+            --   choices = {
+            --     ["qwen2.5-coder-14b-instruct"] = { opts = { can_reason = true } },
+            --     ["/models/lmstudio-community/DeepSeek-R1-Distill-Qwen-7B-GGUF/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf"] = {
+            --       opts = { can_reason = true },
+            --     },
+            --     ["/models/lmstudio-community/Qwen2.5-7B-Instruct-1M-GGUF/Qwen2.5-7B-Instruct-1M-Q4_K_M.gguf"] = {
+            --       opts = { can_reason = true },
+            --     },
+            --   },
+            -- },
             temperature = {
               order = 2,
               mapping = "parameters",
@@ -66,18 +68,45 @@ return {
             },
           },
           env = {
-            url = "http://localhost:8888",
+            url = "http://localhost:8080",
             chat_url = "/v1/chat/completions",
           },
         })
       end,
+      openrouter = function()
+        return require("codecompanion.adapters").extend("openai_compatible", {
+          env = {
+            url = "https://openrouter.ai/api",
+            api_key = "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",
+            chat_url = "/v1/chat/completions",
+          },
+          schema = {
+            model = {
+              default = "google/gemini-2.5-pro-exp-03-25:free",
+              -- default = "deepseek/deepseek-chat-v3-0324:free",
+              -- default = "google/gemini-2.0-flash-thinking-exp:free",
+              -- default = "deepseek/deepseek-r1-distill-qwen-32b:free",
+              -- default = "qwen/qwen-2.5-coder-32b-instruct:free",
+            },
+          },
+        })
+      end,
     },
     strategies = {
       chat = {
-        adapter = "copilot",
+        -- adapter = "copilot",
+        adapter = "openrouter",
         roles = {
           llm = function(adapter)
-            return " Assistant (" .. adapter.formatted_name .. " - " .. adapter.parameters.model .. ")"
+            if adapter.model == nil then
+              return " Assistant"
+            else
+              return " Assistant ("
+                .. adapter.formatted_name
+                .. " - "
+                .. adapter.parameters.model
+                .. ")"
+            end
           end,
           user = " User",
         },
@@ -105,7 +134,8 @@ return {
        },
      },
      inline = {
-        adapter = "copilot",
+        -- adapter = "copilot",
+        adapter = "openrouter",
      },
    },
    display = {