Created
July 10, 2024 03:49
-
-
Save yanyaoer/8603d4fa151af86b54af23e846fc6664 to your computer and use it in GitHub Desktop.
Run llama.cpp inside Neovim
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
--- Return the text of the last visual selection as a single string.
-- Reads the '< and '> marks, so it must be called after a visual selection
-- exists (e.g. from a user command defined with { range = true }).
-- @treturn string selected text with lines joined by '\n' ('' if no selection)
local function get_visual_selection()
  local s_start = vim.fn.getpos("'<")
  local s_end = vim.fn.getpos("'>")
  local n_lines = math.abs(s_end[2] - s_start[2]) + 1
  -- nvim_buf_get_lines is 0-indexed and end-exclusive; getpos marks are 1-indexed.
  local lines = vim.api.nvim_buf_get_lines(0, s_start[2] - 1, s_end[2], false)
  -- Guard: unset marks make getpos return {0, 0, 0, 0}, which yields an empty
  -- lines table here — the unguarded string.sub(lines[1], ...) would error.
  if #lines == 0 then
    return ''
  end
  -- Trim the first line to the start column. When the selection spans a
  -- single line, the end column must be shifted left by the characters
  -- already removed from the front of that same line.
  lines[1] = string.sub(lines[1], s_start[3], -1)
  if n_lines == 1 then
    lines[n_lines] = string.sub(lines[n_lines], 1, s_end[3] - s_start[3] + 1)
  else
    lines[n_lines] = string.sub(lines[n_lines], 1, s_end[3])
  end
  return table.concat(lines, '\n')
end
-- :Llama — ask for an instruction, append the visual selection, and run
-- llama-cli on the combined prompt in a terminal buffer.
vim.api.nvim_create_user_command('Llama',
  function(opts)
    local prompt = vim.fn.input "prompt: "
    local select = get_visual_selection()
    -- shellescape the user-controlled text: a quote, '$', or backtick in the
    -- prompt/selection would otherwise break out of the hand-built '"..."'
    -- quoting and corrupt (or inject into) the shell command.
    local cmd = '~/Projects/agi/llama.cpp/llama-cli'
      .. ' -m ~/Public/models/gemma_2_chinese_chat_q4_k_m.gguf'
      .. ' --log-disable --temp 0.5'
      .. ' -p ' .. vim.fn.shellescape(prompt .. select)
    vim.api.nvim_command('term ' .. cmd)
  end,
  { range = true }
)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment