fix chat gpt and change to doom-one

ksyasuda 2023-08-13 01:29:34 -07:00
parent e582ebeab1
commit 0df26f6082
3 changed files with 112 additions and 70 deletions

View File

@@ -80,6 +80,6 @@ command! PS execute ":PackerSync"
" set termguicolors
" colorscheme doom-one
" colorscheme onedark
-colorscheme catppuccin
+" colorscheme catppuccin
highlight Pmenu ctermfg=white ctermbg=black gui=NONE guifg=white guibg=#282C34
highlight PmenuSel guifg=purple guibg=red
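The two :highlight commands kept above can also be written in Lua, which is where the rest of this commit moves the colorscheme handling. A minimal sketch, assuming Neovim 0.7+ and its nvim_set_hl API; not part of the diff itself:

-- Lua equivalent of the Vimscript :highlight lines above (illustrative only)
vim.api.nvim_set_hl(0, "Pmenu", { fg = "white", bg = "#282C34", ctermfg = "white", ctermbg = "black" })
vim.api.nvim_set_hl(0, "PmenuSel", { fg = "purple", bg = "red" })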

View File

@@ -37,7 +37,7 @@ require('packer').startup(function(use)
},
suggestion = {
enabled = false,
-auto_trigger = true,
+auto_trigger = false,
debounce = 75,
keymap = {
accept = "<C-l>",
@@ -180,19 +180,19 @@ require('packer').startup(function(use)
use 'williamboman/nvim-lsp-installer'
use { 'Mofiqul/dracula.nvim' }
-use {
+use({
'NTBBloodbath/doom-one.nvim',
setup = function()
-- Add color to cursor
vim.g.doom_one_cursor_coloring = false
-- Set :terminal colors
-vim.g.doom_one_terminal_colors = false
+vim.g.doom_one_terminal_colors = true
-- Enable italic comments
-vim.g.doom_one_italic_comments = true
+vim.g.doom_one_italic_comments = false
-- Enable TS support
vim.g.doom_one_enable_treesitter = true
-- Color whole diagnostic text or only underline
-vim.g.doom_one_diagnostics_text_color = true
+vim.g.doom_one_diagnostics_text_color = false
-- Enable transparent background
vim.g.doom_one_transparent_background = false
@@ -201,24 +201,31 @@ require('packer').startup(function(use)
vim.g.doom_one_pumblend_transparency = 20
-- Plugins integration
-vim.g.doom_one_plugin_neorg = false
+vim.g.doom_one_plugin_neorg = true
vim.g.doom_one_plugin_barbar = false
-vim.g.doom_one_plugin_telescope = true
+vim.g.doom_one_plugin_telescope = false
vim.g.doom_one_plugin_neogit = true
vim.g.doom_one_plugin_nvim_tree = true
vim.g.doom_one_plugin_dashboard = true
-vim.g.doom_one_plugin_startify = false
+vim.g.doom_one_plugin_startify = true
vim.g.doom_one_plugin_whichkey = true
vim.g.doom_one_plugin_indent_blankline = true
-vim.g.doom_one_plugin_vim_illuminate = false
-vim.g.doom_one_plugin_lspsaga = true
+vim.g.doom_one_plugin_vim_illuminate = true
+vim.g.doom_one_plugin_lspsaga = false
end,
+config = function()
+vim.cmd("colorscheme doom-one")
+vim.cmd(
+"highlight Pmenu ctermfg=white ctermbg=black gui=NONE guifg=white guibg=#282C34")
+vim.cmd("highlight PmenuSel guifg=purple guibg=red")
+end
-}
+})
use { 'olimorris/onedarkpro.nvim' }
use { 'projekt0n/github-nvim-theme' }
use({
"jackMort/ChatGPT.nvim",
+commit = "24bcca7",
requires = {
"MunifTanjim/nui.nvim", "nvim-lua/plenary.nvim",
"nvim-telescope/telescope.nvim"

View File

@@ -1,77 +1,112 @@
require("chatgpt").setup({
{
welcome_message = "ChatGPT is ready!",
loading_text = "loading",
question_sign = "", -- you can use emoji if you want e.g. 🙂
answer_sign = "", -- 🤖
max_line_length = 120,
yank_register = "+",
chat_layout = {
relative = "editor",
position = "50%",
size = {
height = "80%",
width = "80%",
api_key_cmd = "cat /home/sudacode/.config/openai/apikey",
yank_register = "+",
edit_with_instructions = {
diff = false,
keymaps = {
close = "<C-c>",
accept = "<C-y>",
toggle_diff = "<C-d>",
toggle_settings = "<C-o>",
cycle_windows = "<Tab>",
use_output_as_input = "<C-i>"
}
},
chat = {
welcome_message = WELCOME_MESSAGE,
loading_text = "Loading, please wait ...",
question_sign = "",
answer_sign = "",
max_line_length = 120,
sessions_window = {
border = { style = "rounded", text = { top = " Sessions " } },
win_options = {
winhighlight = "Normal:Normal,FloatBorder:FloatBorder"
}
},
},
settings_window = {
keymaps = {
close = { "<C-c>" },
yank_last = "<C-y>",
yank_last_code = "<C-k>",
scroll_up = "<C-u>",
scroll_down = "<C-d>",
new_session = "<C-n>",
cycle_windows = "<Tab>",
cycle_modes = "<C-f>",
select_session = "<Space>",
rename_session = "r",
delete_session = "d",
draft_message = "<C-d>",
toggle_settings = "<C-o>",
toggle_message_role = "<C-r>",
toggle_system_role_open = "<C-s>",
stop_generating = "<C-x>"
}
},
popup_layout = {
default = "center",
center = { width = "80%", height = "80%" },
right = { width = "30%", width_settings_open = "50%" }
},
popup_window = {
border = {
style = "rounded",
text = {
top = " Settings ",
},
highlight = "FloatBorder",
style = "rounded",
text = { top = " ChatGPT " }
},
},
chat_window = {
filetype = "chatgpt",
win_options = {
wrap = true,
linebreak = true,
foldcolumn = "1",
winhighlight = "Normal:Normal,FloatBorder:FloatBorder"
},
buf_options = { filetype = "markdown" }
},
system_window = {
border = {
highlight = "FloatBorder",
style = "rounded",
text = {
top = " ChatGPT ",
},
highlight = "FloatBorder",
style = "rounded",
text = { top = " SYSTEM " }
},
},
chat_input = {
win_options = {
wrap = true,
linebreak = true,
foldcolumn = "2",
winhighlight = "Normal:Normal,FloatBorder:FloatBorder"
}
},
popup_input = {
prompt = "",
border = {
highlight = "FloatBorder",
style = "rounded",
text = {
top_align = "center",
top = " Prompt ",
},
highlight = "FloatBorder",
style = "rounded",
text = { top_align = "center", top = " Prompt " }
},
},
openai_params = {
win_options = { winhighlight = "Normal:Normal,FloatBorder:FloatBorder" },
submit = "<C-Enter>",
submit_n = "<Enter>",
max_visible_lines = 20
},
settings_window = {
border = { style = "rounded", text = { top = " Settings " } },
win_options = { winhighlight = "Normal:Normal,FloatBorder:FloatBorder" }
},
openai_params = {
model = "gpt-3.5-turbo",
frequency_penalty = 0,
presence_penalty = 0,
max_tokens = 300,
temperature = 0,
top_p = 1,
n = 1,
},
openai_edit_params = {
n = 1
},
openai_edit_params = {
model = "code-davinci-edit-001",
temperature = 0,
top_p = 1,
n = 1,
},
keymaps = {
close = { "<C-c>" },
submit = "<C-Enter>",
yank_last = "<C-y>",
yank_last_code = "<C-k>",
scroll_up = "<C-u>",
scroll_down = "<C-d>",
toggle_settings = "<C-o>",
new_session = "<C-n>",
cycle_windows = "<Tab>",
-- in the Sessions pane
select_session = "<Space>",
rename_session = "r",
delete_session = "d",
},
}
n = 1
},
actions_paths = {},
show_quickfixes_cmd = "Trouble quickfix",
predefined_chat_gpt_prompts = "https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv"
})
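With api_key_cmd, the plugin shells out for the key at startup instead of storing it in the dotfiles. Once configured, ChatGPT.nvim is driven through its user commands such as :ChatGPT and :ChatGPTEditWithInstructions; an example pair of mappings (illustrative only, not part of this commit):

-- Example mappings for ChatGPT.nvim commands (illustrative only)
vim.keymap.set("n", "<leader>cc", "<cmd>ChatGPT<CR>", { desc = "Open ChatGPT window" })
vim.keymap.set({ "n", "v" }, "<leader>ce", "<cmd>ChatGPTEditWithInstructions<CR>",
	{ desc = "Edit with instructions" })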