diff --git a/doc/VectorCode.txt b/doc/VectorCode.txt index 14551f58..0824b0b0 100644 --- a/doc/VectorCode.txt +++ b/doc/VectorCode.txt @@ -29,6 +29,8 @@ Table of Contents *VectorCode-table-of-contents* - |VectorCode-integrations| - |VectorCode-milanglacier/minuet-ai.nvim| - |VectorCode-olimorris/codecompanion.nvim| + - |VectorCode-tools| + - |VectorCode-prompt-library| - |VectorCode-copilotc-nvim/copilotchat.nvim| - |VectorCode-setup| - |VectorCode-configuration-options| @@ -176,6 +178,9 @@ OLIMORRIS/CODECOMPANION.NVIM ~ + +TOOLS + The following requires VectorCode 0.7+ and a recent version of CodeCompanion.nvim. @@ -196,7 +201,7 @@ the `include_in_toolbox` option explained below. >lua ---@module "vectorcode" - opts = { + require("codecompanion").setup({ extensions = { vectorcode = { ---@type VectorCode.CodeCompanion.ExtensionOpts @@ -231,15 +236,15 @@ the `include_in_toolbox` option explained below. enabled = false, adapter = nil, query_augmented = true, - } + }, }, files_ls = {}, - files_rm = {} - } + files_rm = {}, + }, }, }, - } - } + }, + }) < The following are the common options that all tools supports: @@ -295,6 +300,59 @@ so that when the LLM decide what information to include, it _may_ be able to avoid omitting stuff related to query. +PROMPT LIBRARY + +On VectorCode 0.7.16+ and CodeCompanion.nvim 17.20.0+, VectorCode also provides +a customisable prompt library that helps you RAG local directories. The presets +provided by VectorCode are available here +<../../lua/vectorcode/integrations/codecompanion/prompts/presets.lua>, which +you can refer to if you wish to build local RAG APPs with CodeCompanion.nvim +and VectorCode. + +>lua + require("codecompanion").setup({ + extensions = { + vectorcode = { + ---@type VectorCode.CodeCompanion.ExtensionOpts + opts = { + ---@type table + prompt_library = { + { + ["Neovim Tutor"] = { + -- this is for demonstration only. + -- "Neovim Tutor" is shipped with this plugin already, + -- and you don't need to add it in the config + -- unless you're not happy with the defaults. + project_root = vim.env.VIMRUNTIME, + file_patterns = { "lua/**/*.lua", "doc/**/*.txt" }, + -- system_prompt = ..., + -- user_prompt = ..., + }, + }, + }, + }, + }, + }, + }) +< + +The `prompt_library` option is a mapping of prompt name (`string`) to a lua +table (type annotation available) that contains some information used to +generate the embeddings: + +- `project_root``string`, the path to the directory (for example, + `/usr/share/nvim/runtime/`); +- `file_patterns``string[]`, file name patterns that defines files to be vectorised. + You should either use absolute paths or relative paths from the project root; +- `system_prompt` and `user_prompt``string|fun(context:table):string|nil` + Theseoptions allow you to customise the prompts. See + codecompanion.nvim documentation + if you want to use a function here that build the prompts from the context. + +The first time will take some extra time for computing the embeddings, but the +subsequent runs should be a lot faster. + + COPILOTC-NVIM/COPILOTCHAT.NVIM ~ CopilotC-Nvim/CopilotChat.nvim @@ -310,13 +368,14 @@ contextual information about your codebase to enhance Copilot’s responses. 
Add this to your CopilotChat configuration: >lua - local vectorcode_ctx = require('vectorcode.integrations.copilotchat').make_context_provider({ - prompt_header = "Here are relevant files from the repository:", -- Customize header text - prompt_footer = "\nConsider this context when answering:", -- Customize footer text - skip_empty = true, -- Skip adding context when no files are retrieved - }) + local vectorcode_ctx = + require("vectorcode.integrations.copilotchat").make_context_provider({ + prompt_header = "Here are relevant files from the repository:", -- Customize header text + prompt_footer = "\nConsider this context when answering:", -- Customize footer text + skip_empty = true, -- Skip adding context when no files are retrieved + }) - require('CopilotChat').setup({ + require("CopilotChat").setup({ -- Your other CopilotChat options... contexts = { @@ -328,10 +387,10 @@ Add this to your CopilotChat configuration: prompts = { Explain = { prompt = "Explain the following code in detail:\n$input", - context = {"selection", "vectorcode"}, -- Add vectorcode to the context + context = { "selection", "vectorcode" }, -- Add vectorcode to the context }, -- Other prompts... - } + }, }) < @@ -366,7 +425,7 @@ You can configure VectorCode to be part of your sticky prompts, ensuring every conversation includes relevant codebase context automatically: >lua - require('CopilotChat').setup({ + require("CopilotChat").setup({ -- Your other CopilotChat options... sticky = { @@ -392,8 +451,8 @@ cached retrieval results. >lua tabline = { lualine_y = { - require("vectorcode.integrations").lualine(opts) - } + require("vectorcode.integrations").lualine(opts), + }, } < @@ -419,7 +478,7 @@ when neovim starts). If this bothers you, you can use the following snippet: end end, }, - } + }, } < @@ -575,12 +634,9 @@ in an autocmd: callback = function() local bufnr = vim.api.nvim_get_current_buf() cacher.async_check("config", function() - cacher.register_buffer( - bufnr, - { - n_query = 10, - } - ) + cacher.register_buffer(bufnr, { + n_query = 10, + }) end, nil) end, desc = "Register buffer for VectorCode", diff --git a/docs/neovim/README.md b/docs/neovim/README.md index 2a10b528..685480cc 100644 --- a/docs/neovim/README.md +++ b/docs/neovim/README.md @@ -1,4 +1,5 @@ # NeoVim Plugin + > [!NOTE] > This plugin depends on the CLI tool. Please go through > [the CLI documentation](../cli/README.md) and make sure the VectorCode CLI is working @@ -18,6 +19,8 @@ * [Integrations](#integrations) * [milanglacier/minuet-ai.nvim](#milanglacierminuet-ainvim) * [olimorris/codecompanion.nvim](#olimorriscodecompanionnvim) + * [Tools](#tools) + * [Prompt Library](#prompt-library) * [CopilotC-Nvim/CopilotChat.nvim](#copilotc-nvimcopilotchatnvim) * [Setup](#setup) * [Configuration Options](#configuration-options) @@ -155,6 +158,7 @@ or change the value of `async_opts.n_query` in the `setup` function [![asciicast](https://asciinema.org/a/8WP8QJHNAR9lEllZSSx3poLPD.svg)](https://asciinema.org/a/8WP8QJHNAR9lEllZSSx3poLPD?t=3) +#### Tools The following requires VectorCode 0.7+ and a recent version of CodeCompanion.nvim. The CodeCompanion extension will register the following tools: @@ -176,7 +180,7 @@ option explained below. 
```lua ---@module "vectorcode" -opts = { +require("codecompanion").setup({ extensions = { vectorcode = { ---@type VectorCode.CodeCompanion.ExtensionOpts @@ -211,15 +215,15 @@ opts = { enabled = false, adapter = nil, query_augmented = true, - } + }, }, files_ls = {}, - files_rm = {} - } + files_rm = {}, + }, }, }, - } -} + }, +}) ``` The following are the common options that all tools supports: @@ -277,6 +281,58 @@ The `query` tool contains the following extra config options: query so that when the LLM decide what information to include, it _may_ be able to avoid omitting stuff related to query. +#### Prompt Library + +On VectorCode 0.7.16+ and CodeCompanion.nvim 17.20.0+, VectorCode also provides a +customisable prompt library that helps you RAG local directories. The presets +provided by VectorCode are available +[here](../../lua/vectorcode/integrations/codecompanion/prompts/presets.lua), which +you can refer to if you wish to build local RAG APPs with CodeCompanion.nvim and +VectorCode. + +```lua +require("codecompanion").setup({ + extensions = { + vectorcode = { + ---@type VectorCode.CodeCompanion.ExtensionOpts + opts = { + ---@type table + prompt_library = { + { + ["Neovim Tutor"] = { + -- this is for demonstration only. + -- "Neovim Tutor" is shipped with this plugin already, + -- and you don't need to add it in the config + -- unless you're not happy with the defaults. + project_root = vim.env.VIMRUNTIME, + file_patterns = { "lua/**/*.lua", "doc/**/*.txt" }, + -- system_prompt = ..., + -- user_prompt = ..., + }, + }, + }, + }, + }, + }, +}) +``` + +The `prompt_library` option is a mapping of prompt name (`string`) to a lua table +(type annotation available) that contains some information used to generate the +embeddings: + +- `project_root`: `string`, the path to the directory (for example, + `/usr/share/nvim/runtime/`); +- `file_patterns`: `string[]`, file name patterns that defines files to be vectorised. + You should either use absolute paths or relative paths from the project root; +- `system_prompt` and `user_prompt`: `string|fun(context:table):string|nil`: + These options allow you to customise the prompts. See + [codecompanion.nvim documentation](https://codecompanion.olimorris.dev/extending/prompts#recipe-2-using-context-in-your-prompts) + if you want to use a function here that build the prompts from the context. + +The first time will take some extra time for computing the embeddings, but the +subsequent runs should be a lot faster. + ### [CopilotC-Nvim/CopilotChat.nvim](https://github.com/CopilotC-Nvim/CopilotChat.nvim) [CopilotC-Nvim/CopilotChat.nvim](https://github.com/CopilotC-Nvim/CopilotChat.nvim) @@ -290,13 +346,14 @@ contextual information about your codebase to enhance Copilot's responses. 
Add t to your CopilotChat configuration: ```lua -local vectorcode_ctx = require('vectorcode.integrations.copilotchat').make_context_provider({ - prompt_header = "Here are relevant files from the repository:", -- Customize header text - prompt_footer = "\nConsider this context when answering:", -- Customize footer text - skip_empty = true, -- Skip adding context when no files are retrieved -}) - -require('CopilotChat').setup({ +local vectorcode_ctx = + require("vectorcode.integrations.copilotchat").make_context_provider({ + prompt_header = "Here are relevant files from the repository:", -- Customize header text + prompt_footer = "\nConsider this context when answering:", -- Customize footer text + skip_empty = true, -- Skip adding context when no files are retrieved + }) + +require("CopilotChat").setup({ -- Your other CopilotChat options... contexts = { @@ -308,10 +365,10 @@ require('CopilotChat').setup({ prompts = { Explain = { prompt = "Explain the following code in detail:\n$input", - context = {"selection", "vectorcode"}, -- Add vectorcode to the context + context = { "selection", "vectorcode" }, -- Add vectorcode to the context }, -- Other prompts... - } + }, }) ``` @@ -339,7 +396,7 @@ The integration includes caching to avoid sending duplicate context to the LLM, You can configure VectorCode to be part of your sticky prompts, ensuring every conversation includes relevant codebase context automatically: ```lua -require('CopilotChat').setup({ +require("CopilotChat").setup({ -- Your other CopilotChat options... sticky = { @@ -360,8 +417,8 @@ cached retrieval results. ```lua tabline = { lualine_y = { - require("vectorcode.integrations").lualine(opts) - } + require("vectorcode.integrations").lualine(opts), + }, } ``` `opts` is a table with the following configuration option: @@ -386,7 +443,7 @@ tabline = { end end, }, - } + }, } ``` This will further delay the loading of VectorCode to the moment you (or one of @@ -533,12 +590,9 @@ vim.api.nvim_create_autocmd("LspAttach", { callback = function() local bufnr = vim.api.nvim_get_current_buf() cacher.async_check("config", function() - cacher.register_buffer( - bufnr, - { - n_query = 10, - } - ) + cacher.register_buffer(bufnr, { + n_query = 10, + }) end, nil) end, desc = "Register buffer for VectorCode", diff --git a/lua/codecompanion/_extensions/vectorcode/init.lua b/lua/codecompanion/_extensions/vectorcode/init.lua index ec25b1ad..b5803ec2 100644 --- a/lua/codecompanion/_extensions/vectorcode/init.lua +++ b/lua/codecompanion/_extensions/vectorcode/init.lua @@ -8,9 +8,17 @@ ---@field tool_opts table --- Whether to add a tool group that contains all vectorcode tools. ---@field tool_group VectorCode.CodeCompanion.ToolGroupOpts +---Prompt library that automatically creates VectorCode collections on local files +---and set up prompts to let LLM search from certain directories. +--- +---The keys should be the human-readable name of the prompt (as they'd appear in +---the action menu), and values would be `VectorCode.CodeCompanion.PromptFactory.Opts` +---objects. 
+---@field prompt_library table local vc_config = require("vectorcode.config") local logger = vc_config.logger +local utils = require("vectorcode.utils") ---@type VectorCode.CodeCompanion.ExtensionOpts|{} local default_extension_opts = { @@ -25,6 +33,8 @@ local default_extension_opts = { files_rm = {}, }, tool_group = { enabled = true, collapse = true, extras = {} }, + + prompt_library = require("vectorcode.integrations.codecompanion.prompts.presets"), } ---@type sub_cmd[] @@ -52,7 +62,8 @@ local M = { opts.tool_opts = merge_tool_opts(opts.tool_opts) logger.info("Received codecompanion extension opts:\n", opts) local cc_config = require("codecompanion.config").config - local cc_integration = require("vectorcode.integrations").codecompanion.chat + local cc_integration = require("vectorcode.integrations").codecompanion + local cc_chat_integration = cc_integration.chat for _, sub_cmd in pairs(valid_tools) do local tool_name = string.format("vectorcode_%s", sub_cmd) if cc_config.strategies.chat.tools[tool_name] ~= nil then @@ -73,7 +84,7 @@ local M = { else cc_config.strategies.chat.tools[tool_name] = { description = string.format("Run VectorCode %s tool", sub_cmd), - callback = cc_integration.make_tool(sub_cmd, opts.tool_opts[sub_cmd]), + callback = cc_chat_integration.make_tool(sub_cmd, opts.tool_opts[sub_cmd]), opts = { requires_approval = opts.tool_opts[sub_cmd].requires_approval }, } logger.info(string.format("%s tool has been created.", tool_name)) @@ -105,6 +116,39 @@ local M = { tools = included_tools, } end + + for name, prompt_opts in pairs(opts.prompt_library) do + if prompt_opts.name ~= nil and prompt_opts.name ~= name then + vim.notify( + string.format( + "The name of `%s` is inconsistent in the opts (`%s`).\nRenaming to `%s`.", + name, + prompt_opts.name, + name + ), + vim.log.levels.WARN, + vc_config.notify_opts + ) + end + if type(prompt_opts.project_root) == "function" then + prompt_opts.project_root = prompt_opts.project_root() + end + if not utils.is_directory(prompt_opts.project_root) then + vim.notify( + string.format( + "`%s` is not a valid directory for CodeCompanion prompt library.\nSkipping `%s`.", + prompt_opts.project_root, + name + ), + vim.log.levels.WARN, + vc_config.notify_opts + ) + else + prompt_opts.name = name + cc_config.prompt_library[name] = + cc_chat_integration.prompts.register_prompt(prompt_opts) + end + end end), } diff --git a/lua/vectorcode/integrations/codecompanion/init.lua b/lua/vectorcode/integrations/codecompanion/init.lua index 5358c18b..ea793976 100644 --- a/lua/vectorcode/integrations/codecompanion/init.lua +++ b/lua/vectorcode/integrations/codecompanion/init.lua @@ -15,5 +15,6 @@ return { error("Unsupported version of codecompanion!") end end, + prompts = require("vectorcode.integrations.codecompanion.prompts"), }, } diff --git a/lua/vectorcode/integrations/codecompanion/prompts/init.lua b/lua/vectorcode/integrations/codecompanion/prompts/init.lua new file mode 100644 index 00000000..b06b53f3 --- /dev/null +++ b/lua/vectorcode/integrations/codecompanion/prompts/init.lua @@ -0,0 +1,151 @@ +local M = {} + +local vc_config = require("vectorcode.config") + +local utils = require("vectorcode.utils") + +---@param path string[]|string path to files or wildcards. +---@param project_root? string +---@param callback? 
VectorCode.JobRunner.Callback +function M.vectorise_files(path, project_root, callback) + if type(path) == "string" then + path = { path } + end + assert(not vim.tbl_isempty(path), "`path` cannot be empty") + + local jobrunner = + require("vectorcode.integrations.codecompanion.common").initialise_runner( + vc_config.get_user_config().async_backend == "lsp" + ) + + local args = { "vectorise", "--pipe" } + if project_root then + vim.list_extend(args, { "--project_root", project_root }) + end + vim.list_extend(args, path) + jobrunner.run_async(args, function(result, error, code, signal) + if type(callback) == "function" then + callback(result, error, code, signal) + end + end, 0) +end + +---@class VectorCode.CodeCompanion.PromptFactory.Opts +---@field name string? human-readable name of this prompt +---@field project_root string|(fun():string) project_root of the files to be added to the database +---Paths to the files in the local directory to be added to the database. +--- +---These should either be absolute paths, or relative to the project root. +---@field file_patterns string[]|(fun():string[]) +---See https://codecompanion.olimorris.dev/extending/prompts.html#recipe-2-using-context-in-your-prompts +--- +---Note: If a system prompt is set here, your default chat system prompt will be ignored. +---@field system_prompt? string|fun(context:table):string +---This contains some preliminary messages (filled into the chat buffer) that tells the LLM about the task. +---If you're overwriting the default message, make sure to include the tool (`@{vectorcode_query}`). +--- +---See https://codecompanion.olimorris.dev/extending/prompts.html#recipe-2-using-context-in-your-prompts +---@field user_prompt? string|fun(context:table):string + +---@param opts VectorCode.CodeCompanion.PromptFactory.Opts +function M.register_prompt(opts) + opts = vim.deepcopy(opts) + + if type(opts.file_patterns) == "function" then + opts.file_patterns = opts.file_patterns() + end + + assert( + ---@diagnostic disable-next-line: param-type-mismatch + type(opts.project_root) == "string" and utils.is_directory(opts.project_root), + string.format("`%s` is not a valid directory.", opts.project_root) + ) + assert( + ---@diagnostic disable-next-line: param-type-mismatch + opts.file_patterns ~= nil and (not vim.tbl_isempty(opts.file_patterns)), + "Recieved empty path specs." + ) + + assert(type(opts.name) == "string", "`name` cannot be `nil`.") + + local cc_common = require("vectorcode.integrations.codecompanion.common") + local constants = require("codecompanion.config").config.constants + local prompts = {} + + if opts.system_prompt then + table.insert( + prompts, + { role = constants.SYSTEM_ROLE, content = opts.system_prompt } + ) + end + table.insert(prompts, #prompts + 1, { + role = constants.USER_ROLE, + content = opts.user_prompt + or string.format( + [[I have some questions about the documents under the `%s` directory. +The files have been added to the database and can be searched by calling the @{vectorcode_query} tool. +When you call the tool, use `%s` as the value for the argument `project_root`. +Use the information returned by the tool to answer my questions, and cite the sources when appropriate. +If you need more information, call the tool with different search keywords or ask for more context and/or tools. 
+ +Here's my question: + +- ]], + opts.project_root, + opts.project_root + ), + }) + return { + name = opts.name, + strategy = "chat", + opts = { + ignore_system_prompt = opts.system_prompt ~= nil, + pre_hook = function() + if vc_config.get_user_config().notify then + vim.notify( + string.format("Adding files under `%s` to the database.", opts.project_root), + vim.log.levels.INFO, + vc_config.notify_opts + ) + end + M.vectorise_files( + vim + .iter(opts.file_patterns) + :map(function(p) + if vim.fn.isabsolutepath(p) == 1 then + return p + else + return vim.fs.joinpath(opts.project_root, p) + end + end) + :totable(), + opts.project_root, + function(result, err, _, _) + if result ~= nil and not vim.tbl_isempty(result) then + vim.schedule_wrap(vim.notify)( + string.format( + "Vectorised %d new files.", + result.add or 0, + opts.project_root + ), + vim.log.levels.INFO, + vc_config.notify_opts + ) + elseif err ~= nil then + err = cc_common.flatten_table_to_string(err) + if err ~= "" then + vim.schedule_wrap(vim.notify)( + err, + vim.log.levels.WARN, + vc_config.notify_opts + ) + end + end + end + ) + end, + }, + prompts = prompts, + } +end +return M diff --git a/lua/vectorcode/integrations/codecompanion/prompts/presets.lua b/lua/vectorcode/integrations/codecompanion/prompts/presets.lua new file mode 100644 index 00000000..1338cb7d --- /dev/null +++ b/lua/vectorcode/integrations/codecompanion/prompts/presets.lua @@ -0,0 +1,9 @@ +---@type table +local M = {} + +M["Neovim Tutor"] = { + project_root = vim.fs.normalize(vim.env.VIMRUNTIME), + file_patterns = { "lua/**/*.lua", "doc/**/*.txt" }, +} + +return M diff --git a/lua/vectorcode/integrations/codecompanion/vectorise_tool.lua b/lua/vectorcode/integrations/codecompanion/vectorise_tool.lua index d26dde05..cba59190 100644 --- a/lua/vectorcode/integrations/codecompanion/vectorise_tool.lua +++ b/lua/vectorcode/integrations/codecompanion/vectorise_tool.lua @@ -52,7 +52,7 @@ The paths should be accurate (DO NOT ASSUME A PATH EXIST) and case case-sensitiv paths = { type = "array", items = { type = "string" }, - description = "Paths to the files to be vectorised. DO NOT use directories for this parameter.", + description = "Paths to the files to be vectorised. DO NOT use directories for this parameter. You may use wildcard here if the user instructed to do so.", }, project_root = { type = "string", diff --git a/lua/vectorcode/jobrunner/init.lua b/lua/vectorcode/jobrunner/init.lua index 48d77aff..b49754ce 100644 --- a/lua/vectorcode/jobrunner/init.lua +++ b/lua/vectorcode/jobrunner/init.lua @@ -1,5 +1,7 @@ local utils = require("vectorcode.utils") +---@alias VectorCode.JobRunner.Callback fun(result: table, error: table, code:integer, signal: integer?) + --- A class for calling vectorcode commands that aims at providing a unified API for both LSP and command-line backend. --- Implementations exist for both direct command-line execution (`cmd.lua`) and LSP (`lsp.lua`). --- For the format of the `result`, see https://github.com/Davidyz/VectorCode/blob/main/docs/cli.md#for-developers @@ -13,7 +15,7 @@ local utils = require("vectorcode.utils") --- - `signal`: _for cmd runner only_, the shell signal sent to the process. --- The `bufnr` is used for context, potentially to find the project root or attach LSP clients. --- Returns a job handle (e.g., PID or LSP request ID) or nil if the job couldn't be started. ----@field run_async fun(args: string[], callback:fun(result: table, error: table, code:integer, signal: integer?)?, bufnr: integer):(job_handle:integer?) 
+---@field run_async fun(args: string[], callback:VectorCode.JobRunner.Callback?, bufnr: integer):(job_handle:integer?) --- Runs a vectorcode command synchronously, blocking until completion or timeout. --- Executes the command specified by `args`. Waits for up to `timeout_ms` milliseconds. --- The `bufnr` is used for context, potentially to find the project root or attach LSP clients. diff --git a/lua/vectorcode/utils.lua b/lua/vectorcode/utils.lua index 341c96cc..605d429b 100644 --- a/lua/vectorcode/utils.lua +++ b/lua/vectorcode/utils.lua @@ -153,4 +153,24 @@ function M.make_changes_cb(max_num) end end +---@param f string +---@return boolean +function M.is_file(f) + if type(f) ~= "string" then + return false + end + local stats = vim.uv.fs_stat(f) + return stats and (stats.type == "file") or false +end + +---@param f string +---@return boolean +function M.is_directory(f) + if type(f) ~= "string" then + return false + end + local stats = vim.uv.fs_stat(f) + return stats and (stats.type == "directory") or false +end + return M
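
---

Usage sketch (not part of the patch above): the `prompt_library` option this diff introduces can also take function-valued fields, which `register_prompt` and the extension setup resolve before building the CodeCompanion prompt. The snippet below follows the flat `table<string, VectorCode.CodeCompanion.PromptFactory.Opts>` shape used in `presets.lua`; the entry name "Project Docs", the file patterns, and the prompt text are hypothetical, and the `context.filetype` field is assumed from CodeCompanion's prompt-library context documentation.

```lua
-- Minimal sketch only; entry name, patterns and prompt text are hypothetical.
-- Field names follow VectorCode.CodeCompanion.PromptFactory.Opts from this patch.
require("codecompanion").setup({
  extensions = {
    vectorcode = {
      ---@type VectorCode.CodeCompanion.ExtensionOpts
      opts = {
        ---@type table<string, VectorCode.CodeCompanion.PromptFactory.Opts>
        prompt_library = {
          ["Project Docs"] = {
            -- function form: resolved to a string when the extension is set up
            project_root = function()
              return vim.fs.normalize(vim.fn.getcwd())
            end,
            -- relative patterns are joined with project_root by register_prompt
            file_patterns = { "docs/**/*.md", "README.md" },
            -- supplying a system prompt makes this preset skip the default chat
            -- system prompt (register_prompt sets ignore_system_prompt = true)
            system_prompt = "You are a documentation assistant for this project.",
            -- user_prompt may be a string or fun(context):string; the context
            -- table (e.g. context.filetype) comes from CodeCompanion
            user_prompt = function(context)
              return string.format(
                "Search the project docs with the @{vectorcode_query} tool before answering. I am working in a %s buffer.",
                context.filetype
              )
            end,
          },
        },
      },
    },
  },
})
```

With a configuration like this, opening the "Project Docs" entry from the action palette triggers the `pre_hook` added in `prompts/init.lua`, which vectorises the matched files for the resolved `project_root` and pre-fills the chat with a prompt that points the LLM at the `vectorcode_query` tool.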