Generate commit messages using the Completion API
#!/usr/bin/env node
/*
  How to use this script:
  1. Install llama.cpp
  2. Download a GGUF model (e.g. Qwen2.5-7B-Instruct-Q8_0.gguf) from https://huggingface.co/
  3. Start the local server: `llama-server -m ~/Downloads/Qwen2.5-7B-Instruct-Q8_0.gguf`
  4. Stage your changes without committing yet: `git add .`
  5. Run this file: `node generate-commit-message.mjs`
*/
import { execSync } from 'child_process';
import http from 'http';

// Read the staged diff; this is what the model will summarise.
const diff = execSync('git diff --staged').toString().trim();
const payload = JSON.stringify({
  messages: [
    {
      role: "system",
      content: `Please write me a Git commit message based on the diff given by the user,
adhering to the following requirements:
- Header: start the message title with an appropriate emoji (Gitmoji);
- Title should be imperative;
- Title and message body should be separated by an empty line;
- Message body should describe the changes in the diff;
- Message uses bulleted lists if necessary.
Start your answer with the commit message itself,
without explanations or quoting code from the diff - just the commit message.
Now the diff is this: ` + diff
    },
  ]
});
const options = {
  hostname: '127.0.0.1',
  port: 8080,
  path: '/v1/chat/completions',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(payload)
  }
};
const req = http.request(options, (res) => {
  let data = '';
  res.on('data', (chunk) => {
    data += chunk;
  });
  res.on('end', () => {
    let jsonResponse;
    try {
      jsonResponse = JSON.parse(data);
    } catch (error) {
      console.error('Error parsing JSON:', data, error.message);
      process.exit(1);
    }
    try {
      // llama-server exposes an OpenAI-compatible endpoint, so the reply
      // text lives at choices[0].message.content.
      const answer = jsonResponse.choices[0].message.content;
      console.log(answer);
      process.exit(0);
    } catch (error) {
      console.error('Cannot find the answer in the response');
      process.exit(1);
    }
  });
});
// Connection-level errors (e.g. the server is not running) are emitted on the request object.
req.on('error', (error) => {
  console.error('Error making a call to the completion endpoint:', error.message);
  process.exit(1);
});
req.write(payload);
req.end();
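The script only prints the suggested message; committing is left to you. If you want to wire it straight into git, one possible follow-up (a sketch, not part of the original script; the commitWithMessage helper is hypothetical) is to pipe the generated text into `git commit -F -`, which reads the commit message from stdin:

import { execSync } from 'child_process';

// Hypothetical helper: commit the staged changes with the generated message.
// `answer` is assumed to hold the text printed by the script above.
function commitWithMessage(answer) {
  // `git commit -F -` reads the message from stdin, so multi-line
  // messages survive without any shell-quoting gymnastics.
  execSync('git commit -F -', { input: answer, stdio: ['pipe', 'inherit', 'inherit'] });
}

The second script below is a variant of the same idea: instead of calling the HTTP server, it spawns llama-cli directly and feeds it a hand-built ChatML prompt.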
import { spawn, execSync } from 'child_process';

// === CONFIG ===
const MODEL_PATH = '/Users/lape/Models/Qwen2.5-7B-Instruct-Q8_0.gguf';

// === SYSTEM PROMPT ===
const systemPrompt = `
Please write me a Git commit message based on the diff given by the user,
adhering to the following requirements:
- Header: start the message title with an appropriate emoji (Gitmoji),
in the same line as the rest of the title;
- Title should be imperative;
- Title and message body should be separated by an empty line;
- Message body should describe the changes in the diff;
- Message uses bulleted lists if necessary.
Start your answer with the commit message itself,
without explanations or quoting any code from the diff – just the commit message.
`.trim();
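// Purely illustrative example of the shape the prompt above asks for
// (not model output, just the format: Gitmoji + imperative title,
// a blank line, then a bulleted body):
//
//   ✨ Add local commit message generator
//
//   - Read the staged diff with `git diff --staged`
//   - Ask a local Qwen model for a formatted commit message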
// === LOAD STAGED DIFF ===
const diff = execSync('git diff --staged').toString().trim();
const userPrompt = diff;

// === CONSTRUCT CHATML PROMPT ===
const chatmlPrompt = `
<|im_start|>system
${systemPrompt}<|im_end|>
<|im_start|>user
${userPrompt}<|im_end|>
<|im_start|>assistant
`.trim();
// === SPAWN LLAMA-CLI SAFELY ===
// Passing the prompt as an argv element (rather than through a shell)
// avoids any quoting/escaping issues with the diff content.
const llama = spawn('llama-cli', [
  '--model', MODEL_PATH,
  '--n-predict', '-1',
  '--ctx-size', '6500',
  '--prompt', chatmlPrompt,
]);

llama.stdout.on('data', (data) => {
  process.stdout.write(data);
  // A chunk equal to "\n\n> " (0x0a 0x0a 0x3e 0x20) is treated as llama-cli's
  // interactive prompt, i.e. the end of the generated message.
  if (data.equals(Buffer.from([0x0a, 0x0a, 0x3e, 0x20]))) {
    console.log("EOM");
    process.exit(0);
  }
});

llama.stderr.on('data', (data) => {
  process.stderr.write(data);
});
llama.on('exit', (code) => {
  if (code !== 0) {
    console.error(`\n❌ llama-cli exited with code ${code}`);
  } else {
    // Log to stderr so stdout stays clean for the commit message itself.
    console.error(`\n✅ llama-cli exited cleanly`);
  }
});
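The end-of-message check above only fires when the "\n\n> " marker arrives as a stdout chunk of its own, which depends on how the output happens to be buffered. A slightly more tolerant variant (a sketch, not part of the original gist) accumulates the output and looks for the marker at the end of what has been received so far:

let output = '';
const PROMPT_MARKER = '\n\n> ';

llama.stdout.on('data', (data) => {
  process.stdout.write(data);
  output += data.toString();
  // Exit once the buffered output ends with the interactive prompt marker.
  if (output.endsWith(PROMPT_MARKER)) {
    console.error('EOM'); // stderr, so stdout stays a clean commit message
    process.exit(0);
  }
});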