Add Parrot and fix other LLM commands for selections
utensil committed Nov 13, 2024
1 parent d0e64c5 commit 0afd862
Showing 2 changed files with 301 additions and 9 deletions.
9 changes: 9 additions & 0 deletions trees/uts-002F.tree
@@ -378,6 +378,15 @@ It's very important to stay in the flow and Zen mode as much as possible. Split
- Code Companion
- `<localleader>+a` to toggle the inline chat (works also for selection)
- `<localleader>+z` to toggle the chat sidebar
- Parrot
- `<leader>+pr` to rewrite the selection
- `<leader>+pp` to implement the selection
- `<leader>+pn` to start a new chat, optionally with the selection
- select and `:Prt` + `<Tab>` to select advanced prompts
- `:PrtInfo` to inspect configurations
- `:PrtModel` to select the model
- I've configured it to use a buffer for entering the prompt; press escape twice to send it
- it sometimes seems unable to stop generating; use `:PrtChatStop` to stop it (a short usage sketch follows this list)
- Avante (deprecated for now)
- `<leader>+aa` to open the chat, use `a` to accept individual suggestions, or `A` to accept all
- select code and `<leader>+ae` to modify code
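A minimal sketch of the Parrot workflow described above, assuming the parrot.nvim setup and mappings added later in this commit are loaded; the Lua calls simply replay the same `:Prt*` commands on a fresh visual selection:

```lua
-- Visually select some code, then either press the mappings
-- (<leader>pr / <leader>pp / <leader>pn) or run the commands directly:
vim.cmd([['<,'>PrtRewrite]])   -- rewrite the selection in place
vim.cmd([['<,'>PrtImplement]]) -- turn the selected comment/spec into code
vim.cmd([['<,'>PrtVnew]])      -- start a new chat seeded with the selection
vim.cmd("PrtModel")            -- pick a different model if needed
vim.cmd("PrtChatStop")         -- stop a generation that will not finish
```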
301 changes: 292 additions & 9 deletions uts-plugins.lua
@@ -608,37 +608,37 @@ local plugins = {
{
"<leader>ae",
mode = "x",
"<cmd>CopilotChatExplain<cr>",
"<cmd>'<,'>CopilotChatExplain<cr>",
desc = "CopilotChat - Explain code",
},
{
"<leader>at",
mode = "x",
"<cmd>CopilotChatTests<cr>",
"<cmd>'<,'>CopilotChatTests<cr>",
desc = "CopilotChat - Generate tests",
},
{
"<leader>ar",
mode = "x",
"<cmd>CopilotChatReview<cr>",
"<cmd>'<,'>CopilotChatReview<cr>",
desc = "CopilotChat - Review code",
},
{
"<leader>aR",
mode = "x",
"<cmd>CopilotChatRefactor<cr>",
"<cmd>'<,'>CopilotChatRefactor<cr>",
desc = "CopilotChat - Refactor code",
},
{
"<leader>an",
mode = "x",
"<cmd>CopilotChatBetterNamings<cr>",
"<cmd>'<,'>CopilotChatBetterNamings<cr>",
desc = "CopilotChat - Better Naming",
},
{
"<leader>ao",
mode = "x",
"<cmd>CopilotChatOptimize<cr>",
"<cmd>'<,'>CopilotChatOptimize<cr>",
desc = "CopilotChat - Optimize code",
},
{ "<leader>ad", "<cmd>CopilotChatDebugInfo<cr>", desc = "CopilotChat - Debug Info" },
@@ -715,14 +715,297 @@ local plugins = {
keys = {
-- use <C-z> to trigger CodeCompanion for both n and v
{ "<localleader>z", mode = "n", "<cmd>CodeCompanion<cr>", desc = "CodeCompanion - Inline" },
{ "<localleader>z", mode = "v", "<cmd>CodeCompanion<cr>", desc = "CodeCompanion - Inline" },
{ "<localleader>z", mode = "v", "<cmd>'<,'>CodeCompanion<cr>", desc = "CodeCompanion - Inline" },
{ "<C-a>", mode = "n", "<cmd>CodeCompanionActions<cr>", desc = "CodeCompanion - Actions" },
{ "<C-a>", mode = "v", "<cmd>CodeCompanionActions<cr>", desc = "CodeCompanion - Actions" },
{ "<C-a>", mode = "v", "<cmd>'<,'>CodeCompanionActions<cr>", desc = "CodeCompanion - Actions" },
{ "<localleader>a", mode = "n", "<cmd>CodeCompanionChat Toggle<cr>", desc = "CodeCompanion - Chat Toggle" },
{ "<localleader>a", mode = "v", "<cmd>CodeCompanionChat Toggle<cr>", desc = "CodeCompanion - Chat Toggle" },
{
"<localleader>a",
mode = "v",
"<cmd>'<,'>CodeCompanionChat Toggle<cr>",
desc = "CodeCompanion - Chat Toggle",
},
{ "ga", mode = "v", "<cmd>CodeCompanionChat Add<cr>", desc = "CodeCompanion - Chat Add" },
},
},
{
"frankroeder/parrot.nvim",
dependencies = { "ibhagwan/fzf-lua", "nvim-lua/plenary.nvim" }, -- "rcarriga/nvim-notify" },
-- optionally include "rcarriga/nvim-notify" for beautiful notifications
event = "VeryLazy",
lazy = false,
config = function(_, opts)
-- require("notify").setup {
-- background_colour = "#000000",
-- render = "compact",
-- -- top_down = false,
-- }
require("parrot").setup(opts)
end,
opts = {
-- Providers must be explicitly added to make them available.
providers = {
-- anthropic = {
-- api_key = os.getenv "ANTHROPIC_API_KEY",
-- },
-- gemini = {
-- api_key = os.getenv "GEMINI_API_KEY",
-- },
-- groq = {
-- api_key = os.getenv "GROQ_API_KEY",
-- },
-- mistral = {
-- api_key = os.getenv "MISTRAL_API_KEY",
-- },
-- pplx = {
-- api_key = os.getenv "PERPLEXITY_API_KEY",
-- },
-- -- provide an empty list to make provider available (no API key required)
-- ollama = {},
-- openai = {
-- api_key = os.getenv "OPENAI_API_KEY",
-- },
github = {
api_key = os.getenv "GITHUB_TOKEN",
},
-- nvidia = {
-- api_key = os.getenv "NVIDIA_API_KEY",
-- },
-- xai = {
-- api_key = os.getenv "XAI_API_KEY",
-- },
},
user_input_ui = "buffer",
online_model_selection = true,
command_auto_select_response = true,
enable_spinner = false,
hooks = {
Complete = function(prt, params)
local template = [[
I have the following code from {{filename}}:
```{{filetype}}
{{selection}}
```
Please finish the code above carefully and logically.
Respond just with the snippet of code that should be inserted.
]]
local model_obj = prt.get_model "command"
prt.Prompt(params, prt.ui.Target.append, model_obj, nil, template)
end,
CompleteFullContext = function(prt, params)
local template = [[
I have the following code from {{filename}}:
```{{filetype}}
{{filecontent}}
```
Please look at the following section specifically:
```{{filetype}}
{{selection}}
```
Please finish the code above carefully and logically.
Respond just with the snippet of code that should be inserted.
]]
local model_obj = prt.get_model "command"
prt.Prompt(params, prt.ui.Target.append, model_obj, nil, template)
end,
CompleteMultiContext = function(prt, params)
local template = [[
I have the following code from {{filename}} and other related files:
```{{filetype}}
{{multifilecontent}}
```
Please look at the following section specifically:
```{{filetype}}
{{selection}}
```
Please finish the code above carefully and logically.
Respond just with the snippet of code that should be inserted.
]]
local model_obj = prt.get_model "command"
prt.Prompt(params, prt.ui.Target.append, model_obj, nil, template)
end,
Explain = function(prt, params)
local template = [[
Your task is to take the code snippet from {{filename}} and explain it with gradually increasing complexity.
Break down the code's functionality, purpose, and key components.
The goal is to help the reader understand what the code does and how it works.
```{{filetype}}
{{selection}}
```
Use the markdown format with codeblocks and inline code.
Explanation of the code above:
]]
local model = prt.get_model "command"
prt.logger.info("Explaining selection with model: " .. model.name)
prt.Prompt(params, prt.ui.Target.new, model, nil, template)
end,
FixBugs = function(prt, params)
local template = [[
You are an expert in {{filetype}}.
Fix bugs in the below code from {{filename}} carefully and logically:
Your task is to analyze the provided {{filetype}} code snippet, identify
any bugs or errors present, and provide a corrected version of the code
that resolves these issues. Explain the problems you found in the
original code and how your fixes address them. The corrected code should
be functional, efficient, and adhere to best practices in
{{filetype}} programming.
```{{filetype}}
{{selection}}
```
Fixed code:
]]
local model_obj = prt.get_model "command"
prt.logger.info("Fixing bugs in selection with model: " .. model_obj.name)
prt.Prompt(params, prt.ui.Target.new, model_obj, nil, template)
end,
Optimize = function(prt, params)
local template = [[
You are an expert in {{filetype}}.
Your task is to analyze the provided {{filetype}} code snippet and
suggest improvements to optimize its performance. Identify areas
where the code can be made more efficient, faster, or less
resource-intensive. Provide specific suggestions for optimization,
along with explanations of how these changes can enhance the code's
performance. The optimized code should maintain the same functionality
as the original code while demonstrating improved efficiency.
```{{filetype}}
{{selection}}
```
Optimized code:
]]
local model_obj = prt.get_model "command"
prt.logger.info("Optimizing selection with model: " .. model_obj.name)
prt.Prompt(params, prt.ui.Target.new, model_obj, nil, template)
end,
UnitTests = function(prt, params)
local template = [[
I have the following code from {{filename}}:
```{{filetype}}
{{selection}}
```
Please respond by writing table driven unit tests for the code above.
]]
local model_obj = prt.get_model "command"
prt.logger.info("Creating unit tests for selection with model: " .. model_obj.name)
prt.Prompt(params, prt.ui.Target.enew, model_obj, nil, template)
end,
Debug = function(prt, params)
local template = [[
I want you to act as {{filetype}} expert.
Review the following code, carefully examine it, and report potential
bugs and edge cases alongside solutions to resolve them.
Keep your explanation short and to the point:
```{{filetype}}
{{selection}}
```
]]
local model_obj = prt.get_model "command"
prt.logger.info("Debugging selection with model: " .. model_obj.name)
prt.Prompt(params, prt.ui.Target.enew, model_obj, nil, template)
end,
CommitMsg = function(prt, params)
local futils = require "parrot.file_utils"
if futils.find_git_root() == "" then
prt.logger.warning "Not in a git repository"
return
else
local template = [[
I want you to act as a commit message generator. I will provide you
with information about the task and the prefix for the task code, and
I would like you to generate an appropriate commit message using the
conventional commit format. Do not write any explanations or other
words, just reply with the commit message.
Start with a short headline as summary but then list the individual
changes in more detail.
Here are the changes that should be considered by this message:
]] .. vim.fn.system "git diff --no-color --no-ext-diff --staged"
local model_obj = prt.get_model "command"
prt.Prompt(params, prt.ui.Target.append, model_obj, nil, template)
end
end,
SpellCheck = function(prt, params)
local chat_prompt = [[
Your task is to take the text provided and rewrite it into a clear,
grammatically correct version while preserving the original meaning
as closely as possible. Correct any spelling mistakes, punctuation
errors, verb tense issues, word choice problems, and other
grammatical mistakes.
]]
prt.ChatNew(params, chat_prompt)
end,
CodeConsultant = function(prt, params)
local chat_prompt = [[
Your task is to analyze the provided {{filetype}} code and suggest
improvements to optimize its performance. Identify areas where the
code can be made more efficient, faster, or less resource-intensive.
Provide specific suggestions for optimization, along with explanations
of how these changes can enhance the code's performance. The optimized
code should maintain the same functionality as the original code while
demonstrating improved efficiency.
Here is the code:
```{{filetype}}
{{filecontent}}
```
]]
prt.ChatNew(params, chat_prompt)
end,
ProofReader = function(prt, params)
local chat_prompt = [[
I want you to act as a proofreader. I will provide you with texts and
I would like you to review them for any spelling, grammar, or
punctuation errors. Once you have finished reviewing the text,
provide me with any necessary corrections or suggestions to improve the
text. Highlight the corrected fragments (if any) using markdown backticks.
When you have done that subsequently provide me with a slightly better
version of the text, but keep close to the original text.
Finally provide me with an ideal version of the text.
Whenever I provide you with text, you reply in this format directly:
## Corrected text:
{corrected text, or say "NO_CORRECTIONS_NEEDED" instead if there are no corrections made}
## Slightly better text
{slightly better text}
## Ideal text
{ideal text}
]]
prt.ChatNew(params, chat_prompt)
end,
},
},
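-- As I understand parrot.nvim's hooks, each entry above is also registered as a
-- user command with the Prt prefix (e.g. :PrtExplain, :PrtCommitMsg, or
-- :'<,'>PrtUnitTests on a selection), which is what the ":Prt" + <Tab>
-- completion mentioned in the notes lists.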
keys = {
{ "<leader>pr", "<cmd>'<,'>PrtRewrite<cr>", desc = "PtrRewrite", mode = "x" },
{ "<leader>pp", "<cmd>'<,'>PrtImplement<cr>", desc = "PtrImplement", mode = "x" },
{ "<leader>pn", "<cmd>'<,'>PrtVnew<cr>", desc = "PtrVnew", mode = "x" },
{ "<leader>pn", "<cmd>PrtVnew<cr>", desc = "PtrVnew", mode = "n" },
},
},
-- {
-- "zbirenbaum/copilot-cmp",
-- after = {