Created
February 4, 2025 15:12
-
-
Save nickva/0146667758d9d1b37ee873af133a7eda to your computer and use it in GitHub Desktop.
CouchDB K6 script
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// |
// Examples:
// 1) Run all scenarios for 30sec with given user/pass:
//    $ BENCH_DURATION=30s BENCH_USER=adm BENCH_PASS=pass k6 run k6.js
// 2) Run just doc_update:
//    $ BENCH_SCENARIOS=doc_update k6 run $script.js
// 3) Run just doc_get at 10 rps, starting with 25k docs of 64KB each:
//    $ BENCH_DOCS=25000 BENCH_SCENARIOS=doc_get BENCH_GET_RATE=10 k6 run $script.js
import http from 'k6/http'; | |
import encoding from 'k6/encoding'; | |
import { sleep } from 'k6'; | |
// Parameters, defaults or from the environment
// Note: environment variables are prefixed with "BENCH_"
//
const URL = env_str('URL', 'http://localhost:15984'); // CouchDB base URL
const USER = env_str('USER', 'adm');
const PASS = env_str('PASS', 'pass');
const DB = env_str('DB', 'bench_db'); // benchmark DB; dropped and recreated by setup()
const Q = env_str('Q', '1'); // passed as ?q= when creating the DB
const DOCS = env_num('DOCS', 100000); // docs preloaded during setup()
const DOC_SIZE = env_num('DOC_SIZE', 32); // length of each doc's random "data" string
const DURATION = env_str('DURATION', '5m'); // how long each scenario runs
const GET_RATE = env_num('GET_RATE', 2500); // doc_get requests/sec
const UPDATE_RATE = env_num('UPDATE_RATE', 500); // doc_update requests/sec
const INSERT_RATE = env_num('INSERT_RATE', 500); // doc_insert requests/sec
const TAG = env_str('TAG', ''); // free-form label, echoed in the setup() log
const SCENARIOS = env_str('SCENARIOS', 'doc_get,doc_insert'); // comma-separated scenario names
const XHEADER = env_str('XHEADER', ''); // optional extra "Name: value" request header
const BATCH_SIZE = 1000; // docs per _bulk_docs request during preload

// Options shared by every scenario; each scenario adds its own exec/rate.
const SCENARIO_DEFAULTS = {
executor: 'constant-arrival-rate',
duration: DURATION,
timeUnit: '1s',
preAllocatedVUs: 500,
maxVUs: GET_RATE * 4 // headroom: GET_RATE is the largest default rate
};
// Derived params
const DB_URL = `${URL}/${DB}`;
// Shared auth + content-type headers; per-request tag names show up in the summary.
const HEADERS = get_headers(XHEADER, USER, PASS);
const SETUP_PAR = {'headers': HEADERS, tags: {name: 'setup'}};
const GET_PAR = {'headers': HEADERS, tags: {name: 'doc_get'}};
const PUT_PAR = {'headers': HEADERS, tags: {name: 'doc_update'}};
const POST_PAR = {'headers': HEADERS, tags: {name: 'doc_insert'}};
const BULK_PAR = {'headers': HEADERS, tags: {name: 'bulk_docs'}};
const SCEN_OPTS = scenarios(SCENARIOS, SCENARIO_DEFAULTS, GET_RATE, UPDATE_RATE, INSERT_RATE);
// Callbacks & options. These are what k6 expects to call
export const options = {
// setup() preloads up to DOCS documents, which can take a while
setupTimeout: '60m',
// These are bogus, always passing thresholds just so we can
// see the individual tagged requests times in the summary
// but in principle these could be turned into a pass/fail test
thresholds: {
'http_req_duration{name:doc_get}' : ['p(99)>=0'],
'http_req_duration{name:doc_update}' : ['p(99)>=0'],
'http_req_duration{name:doc_insert}' : ['p(99)>=0'],
'http_req_duration{name:bulk_docs}' : ['p(99)>=0']
},
// Selected scenarios plus discardResponseBodies, built by scenarios() below
...SCEN_OPTS
};
export function setup() {
  // Runs once before the scenarios: log the effective parameters, then
  // (re)create a fresh benchmark DB and preload it with DOCS documents.
  console.log(`
* tag: ${TAG}
* scenarios: ${SCENARIOS}
* url: ${URL}
* user: ${USER}
* q: ${Q}
* docs: ${DOCS}
* doc_size: ${DOC_SIZE}
* duration: ${DURATION}
\n`);
  let res = http.put(DB_URL + "?q=" + Q, null, SETUP_PAR);
  if (res.status === 412) {
    // 412 = DB already exists from a previous run: drop it and start clean.
    const del_res = http.del(DB_URL, null, SETUP_PAR);
    if (del_res.status !== 200) {
      throw new Error(`Could not delete old DB ${DB_URL} ${del_res.body}`);
    }
    res = http.put(DB_URL + "?q=" + Q, null, SETUP_PAR);
  }
  if (res.status !== 201) {
    throw new Error(`Could not create DB ${DB_URL} ${res.body} ${res.status}`);
  }
  insert_docs(DOCS, BATCH_SIZE, DOC_SIZE);
  // Give the cluster a moment to settle before load starts.
  sleep(10);
}
export function teardown(data) {
  // Runs once after all scenarios finish: remove the benchmark database.
  const res = http.del(DB_URL, null, SETUP_PAR);
  if (res.status !== 200) {
    throw new Error(`In teardown could not delete DB ${DB_URL} ${res.body}`);
  }
}
export function doc_get () {
  // Scenario: fetch one randomly chosen preloaded document.
  const doc_id = fmt_doc_id(rand_int(0, DOCS - 1));
  http.get(`${DB_URL}/${doc_id}`, GET_PAR);
}
export function doc_update () {
  // Scenario: read-modify-write cycle on a random preloaded document.
  const doc_id = fmt_doc_id(rand_int(0, DOCS - 1));
  const res = http.get(`${DB_URL}/${doc_id}`, GET_PAR);
  if (res.status !== 200) {
    throw new Error(`Got error ${res.status} getting document ${doc_id}`);
  }
  const doc = res.json();
  // The doc id travels in the PUT URL; drop it from the body.
  delete doc['_id'];
  doc['data'] = rand_str(DOC_SIZE);
  http.put(`${DB_URL}/${doc_id}?rev=${doc._rev}`, JSON.stringify(doc), PUT_PAR);
}
export function doc_insert () {
  // Scenario: POST a fresh random document; the server assigns the _id.
  const body = JSON.stringify({'data': rand_str(DOC_SIZE)});
  http.post(`${DB_URL}`, body, POST_PAR);
}
/// End of callback functions | |
// Helpers | |
function env_str(name, default_val) {
  // String parameter: BENCH_<name> from the environment, else the default.
  // Uses a presence check rather than truthiness so an explicitly empty
  // value (e.g. BENCH_TAG='') is honored instead of silently replaced.
  const key = 'BENCH_' + name;
  return __ENV[key] !== undefined ? __ENV[key] : default_val;
}
function env_num(name, default_val) {
  // Numeric parameter: Number(BENCH_<name>) from the environment, else the
  // default. Presence check (not truthiness) so a set value is always
  // converted rather than dropped when it happens to be falsy.
  const key = 'BENCH_' + name;
  return __ENV[key] !== undefined ? Number(__ENV[key]) : default_val;
}
function scenarios(selected_str, base_opts, get_rate, update_rate, insert_rate) {
  // Build the k6 scenario options from a comma-separated selection string.
  // Throws on an unknown scenario name instead of silently producing an
  // `undefined` entry (which made k6 fail later with a cryptic error).
  const scenarios_available = {
    doc_get: {...base_opts, exec: 'doc_get', rate: get_rate},
    doc_update: {...base_opts, exec: 'doc_update', rate: update_rate},
    doc_insert: {...base_opts, exec: 'doc_insert', rate: insert_rate}
  };
  const selected = {};
  for (const key of selected_str.split(',')) {
    const name = key.trim();
    if (!(name in scenarios_available)) {
      throw new Error(`Unknown scenario ${name}`);
    }
    selected[name] = scenarios_available[name];
  }
  // If we're not using doc_update, we can optimize our executors to not
  // parse response bodies
  return {'scenarios': selected, 'discardResponseBodies': ! ('doc_update' in selected)};
}
function get_headers(xtra_header, user, pass) {
  // Basic-auth + JSON content-type headers, optionally merged with one
  // extra "Name: value" header parsed from the BENCH_XHEADER string.
  const credentials = encoding.b64encode(`${user}:${pass}`);
  return {
    'authorization': `Basic ${credentials}`,
    'content-type': 'application/json',
    ...parse_header(xtra_header)
  };
}
function parse_header(header_str) {
  // Parse a single "Name: value" header string into a one-entry object;
  // returns {} when the string is empty. Only the first ":" separates the
  // name, so header values may themselves contain ":".
  const [name, ...rest] = header_str.split(":");
  const val = rest.join(":");
  // Computed key is required here: the original `{name : val}` created a
  // header literally called "name" instead of using the parsed name.
  return name ? {[name]: val} : {};
}
function insert_docs(num, bsize, dsize) {
  // Preload `num` docs with sequential ids, `dsize` random chars each, in
  // batches of `bsize`. Returns the next unused sequential doc id.
  let doc_id = 0;
  const full_batches = Math.trunc(num / bsize);
  for (let i = 0; i < full_batches; i++) {
    doc_id = insert_batch(doc_id, bsize, dsize);
  }
  const remaining = num - (full_batches * bsize);
  // Skip the final request when num divides evenly into batches; the
  // original issued a pointless empty _bulk_docs POST in that case.
  return remaining > 0 ? insert_batch(doc_id, remaining, dsize) : doc_id;
}
function insert_batch(doc_id, count, size) {
  // Write `count` docs with sequential ids starting at `doc_id` in a single
  // _bulk_docs request (w=3 waits for a write quorum). Returns the next id.
  const docs_arr = [];
  for (let i = 0; i < count; i++, doc_id++) {
    docs_arr.push({'_id': fmt_doc_id(doc_id), 'data': rand_str(size)});
  }
  const payload = JSON.stringify({'docs': docs_arr});
  const res = http.post(`${DB_URL}/_bulk_docs?w=3`, payload, BULK_PAR);
  if (res.status !== 201 && res.status !== 202) {
    throw new Error(`Failed _bulk_docs ${res.status}`);
  }
  return doc_id;
}
function fmt_doc_id(n){
  // Zero-pad the id to 32 chars (16 bytes) so it matches the width of the
  // UUIDs the server assigns when random docs are inserted with a post {}
  // request.
  return `${n}`.padStart(32, '0');
}
function rand_int(min, max) {
  // Uniform random integer in [min, max], inclusive on both ends.
  const lo = Math.ceil(min);
  const hi = Math.floor(max);
  const span = hi - lo + 1;
  return lo + Math.floor(Math.random() * span);
}
function rand_str(len) {
  // Random string of `len` lowercase-alphanumeric characters.
  const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
  const picks = [];
  for (let i = 0; i < len; i++) {
    picks.push(chars.charAt(Math.floor(Math.random() * chars.length)));
  }
  return picks.join('');
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Example: