|
1 | | -local check_cli_wrap = require("vectorcode.config").check_cli_wrap |
| 1 | +---@module "CopilotChat" |
2 | 2 |
|
3 | | ----@class VectorCode.CopilotChatOpts |
4 | | ----@field prompt_header string? |
5 | | ----@field prompt_footer string? |
6 | | ----@field skip_empty boolean? |
7 | | ----@field format_file (fun(file:VectorCode.Result):string)? |
| 3 | +---@class VectorCode.CopilotChat.ContextOpts |
| 4 | +---@field max_num number? |
| 5 | +---@field use_lsp boolean? |
8 | 6 |
|
9 | | ----Follow https://github.com/CopilotC-Nvim/CopilotChat.nvim/blob/5ea7845ef77164192a0d0ca2c6bd3aad85b202a1/lua/CopilotChat/context.lua#L10 |
10 | | ----@alias CopilotChat.context.embed {content:string, filename:string, filetype:string} |
| 7 | +local async = require("plenary.async") |
| 8 | +local vc_config = require("vectorcode.config") |
| 9 | +local notify_opts = vc_config.notify_opts |
| 10 | +local utils = require("CopilotChat.utils") |
| 11 | +local check_cli_wrap = vc_config.check_cli_wrap |
| 12 | +local job_runner = nil |
11 | 13 |
|
12 | | ----@param opts VectorCode.CopilotChatOpts? |
13 | | ----@return fun():CopilotChat.context.embed[] Function that can be used in CopilotChat's contextual prompt |
14 | | -local make_context_provider = check_cli_wrap(function(opts) |
15 | | - opts = vim.tbl_deep_extend("force", { |
16 | | - prompt_header = "The following are relevant files from the repository. Use them as extra context for helping with code completion and understanding:", |
17 | | - prompt_footer = "\nExplain and provide a strategy with examples about: \n", |
18 | | - skip_empty = true, |
19 | | - format_file = function(file) |
20 | | - local utils = require("CopilotChat.utils") |
21 | | - return string.format( |
22 | | - [[ |
23 | | -### File: %s |
24 | | -```%s |
25 | | -%s |
26 | | -``` |
27 | | -
|
28 | | ---- |
29 | | -]], |
30 | | - file.path, |
31 | | - utils.filetype(file.path), |
32 | | - file.document |
33 | | - ) |
34 | | - end, |
35 | | - }, opts or {}) |
36 | | - |
37 | | - return function() |
38 | | - local log = require("plenary.log") |
39 | | - local copilot_utils = require("CopilotChat.utils") |
40 | | - local vectorcode_cacher = require("vectorcode.config").get_cacher_backend() |
41 | | - -- Validate that CopilotChat is available |
42 | | - if not pcall(require, "CopilotChat") then |
43 | | - log.error("CopilotChat is not available. Please make sure it's installed.") |
44 | | - return {} |
---Lazily initialise and cache the job runner used to talk to VectorCode.
---Prefers the LSP-based runner when requested, falling back to the plain
---cmd runner (with a one-time warning) when the LSP runner cannot be loaded.
---@param use_lsp boolean Whether to prefer the LSP-based job runner.
---@return table job_runner The cached job runner module.
local function get_runner(use_lsp)
  if job_runner == nil then
    if use_lsp then
      -- `require` raises on load failure, so a bare call would propagate the
      -- error and the fallback below would never run. pcall lets us degrade
      -- gracefully to the cmd runner instead.
      local ok, lsp_runner = pcall(require, "vectorcode.jobrunner.lsp")
      if ok then
        job_runner = lsp_runner
      end
    end
    if job_runner == nil then
      job_runner = require("vectorcode.jobrunner.cmd")
      if use_lsp then
        -- Warn from the main loop; this function may be called from a
        -- context where direct vim.notify is unsafe.
        vim.schedule_wrap(vim.notify)(
          "Failed to initialise the LSP runner. Falling back to cmd runner.",
          vim.log.levels.WARN,
          notify_opts
        )
      end
    end
  end
  return job_runner
end
46 | 33 |
|
47 | | - -- Get all valid listed buffers |
48 | | - local listed_buffers = vim.tbl_filter(function(b) |
49 | | - return copilot_utils.buf_valid(b) |
50 | | - and vim.fn.buflisted(b) == 1 |
51 | | - and #vim.fn.win_findbuf(b) > 0 |
52 | | - end, vim.api.nvim_list_bufs()) |
---Execute a VectorCode query through the configured job runner.
---plenary's async.wrap turns the callback-style runner call into an
---awaitable function (4 = total argument count including the callback).
---@param args string[] CLI-style arguments forwarded to the runner.
---@param use_lsp boolean Whether the LSP runner should be preferred.
---@param bufnr integer Buffer the query is issued on behalf of.
---@async
local run_job = async.wrap(function(args, use_lsp, bufnr, callback)
  -- assert passes the runner through on success and raises if it is nil.
  local runner = assert(get_runner(use_lsp))
  runner.run_async(args, callback, bufnr)
end, 4)
53 | 43 |
|
54 | | - local all_content = "" |
55 | | - local total_files = 0 |
56 | | - local processed_paths = {} |
---Build a CopilotChat context provider backed by VectorCode retrieval.
---Guarded by check_cli_wrap so it only activates when the VectorCode CLI
---is actually available.
---@param opts VectorCode.CopilotChat.ContextOpts? max_num: number of results to retrieve (default 5); use_lsp: prefer the LSP job runner (defaults from the user config's async_backend).
---@return CopilotChat.config.context
local make_context = check_cli_wrap(function(opts)
  opts = vim.tbl_deep_extend("force", {
    max_num = 5,
    use_lsp = vc_config.get_user_config().async_backend == "lsp",
  }, opts or {})

  return {
    -- Instructions shown to the LLM describing when/how to use this context.
    description = [[This gives you the ability to access the repository to find information that you may need to assist the user. Supports input (query).

- **Use at your discretion** when you feel you don't have enough information about the repository or project.
- **Don't escape** special characters.
- If a class, type or function has been imported from another file, this context may be able to find its source. Add the name of the imported symbol to the query.
- The embeddings are mostly generated from source code, so using keywords that may be present in source code may help with the retrieval.
- Avoid retrieving one single file because the retrieval mechanism may not be very accurate.
- If a query failed to retrieve desired results, a new attempt should use different keywords that are orthogonal to the previous ones but with similar meanings
- Do not use exact query keywords that you have used in a previous context call in the conversation, unless the user instructed otherwise
]],

    -- Prompt the user for a query when the context is invoked interactively.
    input = function(callback)
      vim.ui.input({
        prompt = "Enter query> ",
      }, callback)
    end,

    ---Resolve the context: run the VectorCode query and map the results
    ---into CopilotChat embed items.
    resolve = function(input, source, prompt)
      -- Fall back to the chat prompt itself when no explicit query is given.
      if not input or input == "" then
        input = prompt
      end

      local args = {
        "query",
        "--pipe",
        "-n",
        tostring(opts.max_num),
        -- args are passed to the job runner as a list (no shell involved),
        -- so the query must NOT be wrapped in literal quote characters —
        -- they would become part of the embedding query text.
        input,
        "--project_root",
        source.cwd(),
        "--absolute",
      }

      local result, err = run_job(args, opts.use_lsp, source.bufnr)
      -- Only surface the error when nothing at all was retrieved; partial
      -- results are still useful to the chat.
      if utils.empty(result) and err then
        error(utils.make_string(err))
      end

      -- vim API calls below must happen on the main loop.
      utils.schedule_main()
      return vim.tbl_map(function(item)
        return {
          content = item.document,
          filename = item.path,
          filetype = utils.filetype(item.path),
        }
      end, result)
    end,
  }
end)
108 | 102 |
|
109 | | --- Update the integrations/init.lua file to include copilotchat |
---Module exports: the CopilotChat context factory.
return {
  make_context = make_context,
}
0 commit comments