diff --git a/README.md b/README.md index b223ed9e..548e3997 100644 --- a/README.md +++ b/README.md @@ -222,6 +222,18 @@ Default "noop" agent is `copilot`. For more information about extension agents, see [here](https://docs.github.com/en/copilot/using-github-copilot/using-extensions-to-integrate-external-tools-with-copilot-chat) You can install more agents from [here](https://github.com/marketplace?type=apps&copilot_app=true) +You can optionally define extra per-agent configuration like this: + +```lua +{ + agents = { + perplexityai = { + model = 'llama-3.1-sonar-huge-128k-online' + }, + } +} +``` + ### Contexts Contexts are used to determine the context of the chat. @@ -411,6 +423,7 @@ Also see [here](/lua/CopilotChat/config.lua): history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history callback = nil, -- Callback to use when ask response is received + agents = nil, -- Default per-agent configuration -- default selection selection = function(source) diff --git a/lua/CopilotChat/config.lua b/lua/CopilotChat/config.lua index 24149270..62a757be 100644 --- a/lua/CopilotChat/config.lua +++ b/lua/CopilotChat/config.lua @@ -90,6 +90,7 @@ local select = require('CopilotChat.select') ---@field highlight_headers boolean? ---@field history_path string? ---@field callback fun(response: string, source: CopilotChat.config.source)? +---@field agents table? ---@field selection nil|fun(source: CopilotChat.config.source):CopilotChat.config.selection? ---@field contexts table? ---@field prompts table? @@ -124,6 +125,7 @@ return { history_path = vim.fn.stdpath('data') .. 
'/copilotchat_history', -- Default path to stored history callback = nil, -- Callback to use when ask response is received + agents = nil, -- Default per-agent configuration -- default selection selection = function(source) diff --git a/lua/CopilotChat/copilot.lua b/lua/CopilotChat/copilot.lua index ccf49e15..fffd39fc 100644 --- a/lua/CopilotChat/copilot.lua +++ b/lua/CopilotChat/copilot.lua @@ -10,6 +10,7 @@ ---@field model string? ---@field agent string? ---@field temperature number? +---@field extra_body table? ---@field on_progress nil|fun(response: string):nil ---@class CopilotChat.copilot.embed.opts @@ -707,19 +708,23 @@ function Copilot:ask(prompt, opts) full_response = full_response .. content end - local body = vim.json.encode( - generate_ask_request( - self.history, - prompt, - system_prompt, - generated_messages, - model, - temperature, - max_output_tokens, - not vim.startswith(model, 'o1') - ) + local request = generate_ask_request( + self.history, + prompt, + system_prompt, + generated_messages, + model, + temperature, + max_output_tokens, + not vim.startswith(model, 'o1') ) + if opts.extra_body then + request = vim.tbl_extend('force', request, opts.extra_body) + end + + local body = vim.json.encode(request) + if vim.startswith(model, 'claude') then self:enable_claude() end diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua index d606c653..4598171a 100644 --- a/lua/CopilotChat/init.lua +++ b/lua/CopilotChat/init.lua @@ -637,6 +637,7 @@ function M.ask(prompt, config) model = selected_model, agent = selected_agent, temperature = config.temperature, + extra_body = config.agents and selected_agent and config.agents[selected_agent], on_progress = function(token) vim.schedule(function() state.chat:append(token)