some emacs lisp snippets
;; This is miraculously all you need to add decent support for python.
;; It requires that you have uv installed. Then run `eglot` in a python buffer.
(use-package eglot
  :defer t
  :config
  (add-to-list 'eglot-server-programs
               `(python-mode . ,(split-string "uvx --quiet --from pyright pyright-langserver --stdio"))))
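;; Optional sketch (not part of the original config): if you'd rather have
;; eglot start automatically in Python buffers instead of running `M-x eglot'
;; by hand, a hook like this should do it.
(add-hook 'python-mode-hook #'eglot-ensure)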
;; this is handy for dumping web pages into buffers, to then add them to context for AI with gptel
(defun alg/insert-markdown-from-url (url)
  "Insert markdown generated from URL."
  (interactive "sURL: ")
  (let ((markdown (shell-command-to-string
                   (format "uvx --quiet --from trafilatura trafilatura --output-format markdown -u %s"
                           ;; quote the URL so characters like & or ? survive the shell
                           (shell-quote-argument url)))))
    (insert markdown)))
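;; Usage sketch (the keybinding is hypothetical, not in the original): bind the
;; command to a key, or call it from other elisp with a URL string, e.g.
;; (alg/insert-markdown-from-url "https://example.com").
(global-set-key (kbd "C-c w") #'alg/insert-markdown-from-url)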
;; you don't need anything like this at all to use gptel,
;; but this is what it ends up looking like if you want to default to sonnet 3.6,
;; have API keys for a million models, and keep them all in a .authinfo file
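;; For reference, a sketch of what those .authinfo entries can look like,
;; assuming the standard netrc format that auth-source reads: one line per
;; backend, with `machine' matching the :host strings searched below (the
;; login field is arbitrary here, since the searches only filter on :host):
;;
;;   machine api.anthropic.com login apikey password <your-anthropic-key>
;;   machine generativelanguage.googleapis.com login apikey password <your-gemini-key>
;;   machine api.perplexity.ai login apikey password <your-perplexity-key>
;;   machine api.groq.com login apikey password <your-groq-key>
;;   machine api.deepseek.com login apikey password <your-deepseek-key>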
(use-package gptel
  :defer t
  :config
  ;; model names: https://docs.anthropic.com/en/docs/about-claude/models
  (setq gptel-model 'claude-3-5-sonnet-20241022
        gptel-backend
        (gptel-make-anthropic "Claude"
          :key (plist-get (car (auth-source-search
                                :host "api.anthropic.com"))
                          :secret)
          :stream t))
  (gptel-make-gemini "Gemini"
    :key (plist-get (car (auth-source-search
                          :host "generativelanguage.googleapis.com"))
                    :secret)
    :stream t)
  (gptel-make-ollama "OllamaLocal"
    :host "localhost:11434"
    :stream t
    :models '("llama2:latest"))
  ;; Perplexity offers an OpenAI compatible API
  (gptel-make-openai "Perplexity"       ;Any name you want
    :host "api.perplexity.ai"
    :key (plist-get (car (auth-source-search
                          :host "api.perplexity.ai"))
                    :secret)
    :endpoint "/chat/completions"
    :stream t
    :models '(;; has many more, check perplexity.ai
              "llama-3-sonar-small-32k-chat"
              "llama-3-sonar-small-32k-online"
              "llama-3-sonar-large-32k-chat"
              "llama-3-sonar-large-32k-online"))
  ;; Groq offers an OpenAI compatible API
  (gptel-make-openai "Groq"
    :host "api.groq.com"
    :endpoint "/openai/v1/chat/completions"
    :stream t
    :key (plist-get (car (auth-source-search
                          :host "api.groq.com"))
                    :secret)
    :models '("mixtral-8x7b-32768"
              "gemma-7b-it"
              "llama-3.1-405b-reasoning"
              "llama-3.1-70b-versatile"
              "llama-3.1-8b-instant"
              "llama3-70b-8192"
              "llama3-8b-8192"))
  ;; DeepSeek offers an OpenAI compatible API
  (gptel-make-openai "DeepSeek"         ;Any name you want
    :host "api.deepseek.com"
    :endpoint "/chat/completions"
    :stream t
    :key (plist-get (car (auth-source-search
                          :host "api.deepseek.com"))
                    :secret)
    :models '("deepseek-chat" "deepseek-coder"))
  (defun alg/gptel-toggle-verbosity ()
    "Toggle gptel's logging verbosity."
    (interactive)
    (if (eq gptel-log-level 'info)
        (progn
          (customize-set-variable 'gptel-log-level nil)
          (customize-set-variable 'gptel-stream t)
          (message "Disabled gptel logging"))
      (progn
        (customize-set-variable 'gptel-log-level 'info)
        (customize-set-variable 'gptel-stream nil)
        (message "Enabled gptel logging"))))

  ;; alg/initmessage is presumably a personal logging helper defined elsewhere
  ;; in the author's init; it is not part of this gist.
  (alg/initmessage "Done loading gptel"))
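;; Basic usage note (general gptel commands, not specific to this config):
;; `M-x gptel' opens a dedicated chat buffer, `M-x gptel-send' sends the region
;; or the buffer up to point from any buffer, and `M-x gptel-menu' lets you
;; switch among the backends and models registered above.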