start new:
tmux
start new with session name:
tmux new -s myname
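reattach to the named session later (same session name as above):
tmux attach -t myname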
[program:gunicorn-graphite]
command=/usr/local/bin/gunicorn_django -u www-data -g www-data -b 127.0.0.1:8080 --log-file=/opt/graphite/storage/log/webapp/gunicorn.log /opt/graphite/webapp/graphite/settings.py
process_name=%(program_name)s
autostart=true
autorestart=true
stopsignal=QUIT
user=www-data

[program:carbon-cache]
command=python /opt/graphite/bin/carbon-cache.py --debug start
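To pick these programs up, save the config somewhere supervisord includes (the path here is an assumption, e.g. /etc/supervisor/conf.d/graphite.conf) and reload:
sudo supervisorctl reread
sudo supervisorctl update
sudo supervisorctl status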
{
  "template": "logstash-*",
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 0,
    "index": {
      "query": { "default_field": "@message" },
      "store": { "compress": { "stored": true, "tv": true } }
    }
  }
}
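One way to install the template, using the pre-1.0 Elasticsearch template API (host and filename assumed):
curl -XPUT 'http://localhost:9200/_template/logstash' -d @logstash-template.json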
http {
    log_format filt '$remote_addr - $remote_user [$time_local] "$_request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';
    server {
        location /login {
            # `set` is provided by the Rewrite module
            set $filter "password|secret";
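The snippet cuts off here. A plausible continuation, assuming the intent is that $_request (referenced in the log_format above) is a sanitized copy of $request with matched parameter values masked before logging:
            # copy the request line, then blank out any password/secret values
            set $_request $request;
            if ($request ~* "^(.*)(password|secret)=[^& ]*(.*)$") {
                set $_request "$1$2=********$3";
            }
            access_log /var/log/nginx/access.log filt;
        }
    }
}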
#!/bin/bash
# This file was placed here by GitLab. It makes sure that your pushed commits
# will be processed properly.
while read oldrev newrev ref
do
  # For every branch or tag that was pushed, create a Resque job in redis.
  pwd=`pwd`
  reponame=`basename "$pwd" | cut -d. -f1`
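The hook is truncated above. A hedged completion that enqueues the Resque job and closes the loop (the queue name and job payload are assumptions based on the comment):
  # push a JSON job onto the Resque queue the workers poll (names assumed)
  redis-cli rpush "resque:gitlab:queue:post_receive" \
    "{\"class\":\"PostReceive\",\"args\":[\"$reponame\",\"$oldrev\",\"$newrev\",\"$ref\"]}" > /dev/null
done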
This statement grants any user permission to perform any S3 action on objects in the specified bucket, but only when the request originates from the range of IP addresses specified in the condition. The condition allows the 192.168.143.0/24 range with one exception: 192.168.143.188.
Note that the IpAddress and NotIpAddress values specified in the condition use the CIDR notation described in RFC 4632. For more information, go to http://www.rfc-editor.org/rfc/rfc4632.txt.
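A sketch of the statement being described, assuming a bucket named examplebucket (the condition operators and CIDR values come from the text above):
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Sid": "IPAllowExample",
      "Effect": "Allow",
      "Principal": "*",
      "Action": "s3:*",
      "Resource": "arn:aws:s3:::examplebucket/*",
      "Condition": {
        "IpAddress":    { "aws:SourceIp": "192.168.143.0/24" },
        "NotIpAddress": { "aws:SourceIp": "192.168.143.188/32" }
      }
    }
  ]
}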
cmp = (a, b) -> if a > b then 1 else if a < b then -1 else 0

# Sort an array of objects in place by the given property.
# Defaulting `options` to {} guards the `options.lower` check when no options are passed.
Array::sortBy = (key, options = {}) ->
  @sort (a, b) ->
    [av, bv] = [a[key], b[key]]
    [av, bv] = [av.toLowerCase(), bv.toLowerCase()] if options.lower
    cmp av, bv
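Usage, sorting case-insensitively (a quick example, not from the original):
people = [{name: "beth"}, {name: "Al"}]
people.sortBy "name", lower: true  # => [{name: "Al"}, {name: "beth"}]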
sudo apt-get update
sudo apt-get install -y git-core build-essential libssl-dev libboost-all-dev libdb5.1-dev libdb5.1++-dev libgtk2.0-dev
git clone https://github.com/bitcoin/bitcoin.git
cd bitcoin/src
make -f makefile.unix clean; make -f makefile.unix USE_UPNP= bitcoind
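A quick smoke test of the freshly built binary (bitcoind of this era refuses to start until rpcuser/rpcpassword are set in ~/.bitcoin/bitcoin.conf, and also acts as its own RPC client):
./bitcoind -daemon
./bitcoind getinfo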
# This is not the way to run a production web server.
#
# It is a quick and easy way to get PHP and a database working
# on a low-end machine for educational purposes.
#
# This requires Debian Wheezy for PHP 5.4;
# I think that is coming to the Raspberry Pi soon.
sudo apt-get install php5-cli php5-sqlite sqlite3
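Once installed, PHP 5.4's built-in web server is enough to test with (document root and port are up to you), and a one-liner confirms the SQLite extension loaded:
php -r 'var_dump(extension_loaded("sqlite3"));'
cd /path/to/your/site
php -S 0.0.0.0:8080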
#!/bin/bash
# Herein we back up our indexes! This script should run at like 6pm or something, after logstash
# rotates to a new ES index and there's no new data coming in to the old one. We grab metadata,
# compress the data files, create a restore script, and push it all up to S3.
TODAY=`date +"%Y.%m.%d"`
INDEXNAME="logstash-$TODAY" # this had better match the index name in ES
INDEXDIR="/usr/local/elasticsearch/data/logstash/nodes/0/indices/"
BACKUPCMD="/usr/local/backupTools/s3cmd --config=/usr/local/backupTools/s3cfg put"
BACKUPDIR="/mnt/es-backups/"
YEARMONTH=`date +"%Y-%m"`
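The script is cut off above. A hedged sketch of the steps the header promises, assuming ES is on localhost:9200 and the bucket is named es-backups:
BACKUPNAME="$INDEXNAME.tar.gz"
mkdir -p $BACKUPDIR
# flush the index so everything is on disk before we tar it up (old ES flush API)
curl -s -XPOST "http://localhost:9200/$INDEXNAME/_flush" > /dev/null
tar czf $BACKUPDIR/$BACKUPNAME -C $INDEXDIR $INDEXNAME
# push the archive to S3, filed by month (bucket name is an assumption)
$BACKUPCMD $BACKUPDIR/$BACKUPNAME s3://es-backups/$YEARMONTH/$BACKUPNAME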