Skip to content

Instantly share code, notes, and snippets.

View nickva's full-sized avatar

Nick Vatamaniuc nickva

  • USA
View GitHub Profile
@nickva
nickva / couchdb2.2_on_centos7.sh
Last active October 4, 2018 18:35
CouchDB 2.2 setup CentOS 7
# Verify
$ gpg --verify apache-couchdb-*.tar.gz.asc
$ sha256sum --check apache-couchdb-*.tar.gz.sha256
$ sha512sum --check apache-couchdb-*.tar.gz.sha512
# Set max files limits
$ sudo emacs /etc/security/limits.conf
(add)
% works with https://gist.github.com/davisp/626acba57bb0f6da84a1d0de4d7237c3
maybe_trace(MochiReq) ->
case MochiReq:get_header_value("x-couchdb-trace") of
"true" ->
case is_tracer_running() of
true ->
couch_log:info("Tracer updating scope with request pid:~p", [self()]),
% https://github.com/rabbitmq/looking_glass/blob/master/src/lg.erl#L125
#!/usr/bin/env python
import sys
import socket
import itertools
import time
import requests
import couchdb
#!/usr/bin/env python
import requests
import sys, socket, hashlib, base64, itertools, time
import couchdb
DEBUG = True
PORT = 15984
DATABASE = 'db'
#!/usr/bin/env python
#
# Script to benchmark CouchDB compaction. It creates a db, populates it with
# various types of data based on provided parameters, then times the
# compaction.
#
# By default this script uses the 15986 (local) port so it creates and operates
# on one shard only.
#
# Specifying a parameter multiple times runs the script with all possible
@nickva
nickva / stampede_ddocs.py
Created July 20, 2017 18:44
Script which makes a lot of parallel requests to views in a single db. It is used to stress the ddoc_cache component in CouchDB
#!/usr/bin/env python
#
# To install:
# $ virtualenv ./venv && . ./venv/bin/activate && pip install CouchDB
#
# To run:
# $ ./stampede_ddocs.py -p 64 -w 10 -t 1000 -u https://dbhost
#
# Where:
# -p 64 : Start 64 OS processes
#!/usr/bin/env python
"""
Examples:
1. Using 50 concurrent workers create 100k dbs
$ dbtumble.py -u https://user:pass@host -w 50 -d 100000 -c
2. Open 100k dbs continuously by getting their info
$ dbtumble.py -u https://user:pass@host -w 50 -d 100000 -l
"""
#!/usr/bin/env python
import sys, couchdb, time, os, threading, uuid, argparse, random, traceback
from multiprocessing.dummy import Pool as ThreadPool
DB_URLS = [
'http://adm:pass@localhost:15984',
'http://adm:pass@localhost:25984',
'http://adm:pass@localhost:35984'
]
#!/usr/bin/env python
import argparse
import sys
import couchdb
import random
import string
import uuid
import time
import copy
#!/usr/bin/env python
import argparse
import sys
import couchdb
import random
import string
import uuid
import time
import copy