Created August 17, 2025 19:53
ollama on Plan 9
#!/bin/rc
# This script was written entirely out of perversity:
# it is LLM code-completion for acme under Plan 9.
# It will attempt to fill in a chunk of code under the cursor,
# in acme
# under Plan 9
# using only tools available in Plan 9
# to talk to ollama.
# I have mixed the sacred and the profane.
# You just put it in the tag in the acme window, and then you
# middle-click it and it will complete a configurable number of
# tokens. The result will be highlighted.
# The only thing you need that doesn't come with Plan 9 is the Go port of jq.
out=/fd/1
host=kan:11434
model=codellama:7b-code
diag=/dev/null
temp=0
tokens=128
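# Flags (defaults above):
#	-i	write the completion back into the window in place of the selection
#	-e	write the completion to standard output (the default)
#	-a	host:port of the ollama server
#	-m	model name
#	-t	sampling temperature
#	-n	maximum number of tokens to predict
#	-v	print the URL and payload being sent on standard error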
while(! ~ $#* 0){
	switch($1) {
	case -i ; out=wrsel; shift
	case -e ; out=/fd/1; shift
	case -a ; shift; host=$1; shift
	case -m ; shift; model=$1; shift
	case -t ; shift; temp=$1; shift
	case -n ; shift; tokens=$1; shift
	case -v ; diag=/fd/2; shift
	case * ; echo unknown option: $1 >[1=2]; exit usage
	}
}
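# Acme sets $winid in the environment of commands run from a window's
# tag; that window's control files live under /mnt/acme/$winid.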
cd /mnt/acme/$winid || exit winid
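# Ask acme for the selection: writing 'addr=dot' to ctl sets the window's
# addr to dot (the current selection), and reading addr back gives its
# start and end character offsets. The fd juggling lets one block hold
# ctl open for writing on fd 3 and addr open for reading on fd 4.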
addrs = `{{echo -n 'addr=dot' >[1=3];cat <[0=4]} >[3]ctl <[4]addr}
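# Read the whole window body, JSON-escape it, and split it at the selection
# offsets: everything before dot becomes the prompt, everything after it
# becomes the suffix for fill-in-the-middle completion.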
body = `''{awk -v 'si=' ^ $addrs(1) -v 'ei=' ^ $addrs(2) '
{f=f $0 "\n"}
function fix(str) {
	# For whatever reason, \ and " behave differently in substitutions.
	gsub(/\\/, "\&", str)
	gsub(/"/, "\\\"", str)
	gsub(/\n/, "\\n", str)
	gsub(/\t/, "\\t", str)
	return str
}
END{
	s = fix(substr(f, 1, si))
	e = fix(substr(f, ei+1))
	printf "\"prompt\":\"%s\",\"suffix\":\"%s\"", s, e
}
' body}
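# Build the request for ollama's /api/generate: a non-streaming completion
# with the token limit, temperature, top_p, and a stop on the <EOT> token
# that codellama emits at the end of an infill, plus the prompt/suffix pair.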
bpl = '{"model":"' ^ $model ^ '","stream":false,"options":{"num_predict":' ^ $tokens ^ ',"temperature":' ^ $temp ^ ',"top_p":0.9,"stop":["<EOT>"]},' ^ $body ^ '}'
echo $bpl > /tmp/last-ollama-payload.js
echo http:// ^ $host ^ /api/generate → $model > $diag
echo ' ' $bpl > $diag
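# POST the payload, keep a copy of the raw reply for debugging, and pull
# out the generated text. With -i it lands in the window's wrsel file,
# which replaces the selection and leaves the new text highlighted.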
hget -p $bpl http:// ^ $host ^ /api/generate |
	tee /tmp/last-ollama-resp.js |
	gojq -r .response > $out
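A rough usage sketch, assuming the script is saved as Ollama (any name works) in a directory on the path acme commands see: put something like

	Ollama -i -n 64

in a window's tag, place dot where the completion should go, and middle-click the word. With -i the completion replaces dot and is left highlighted; without it the text goes to standard output, which acme shows in a +Errors window. The server can also be exercised by hand with the same tools the script uses, for example:

	hget -p '{"model":"codellama:7b-code","stream":false,"prompt":"int main("}' http://kan:11434/api/generate | gojq -r .response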