Skip to content

Instantly share code, notes, and snippets.

View palawer's full-sized avatar

palawer palawer

View GitHub Profile
@palawer
palawer / logrotate.sh
Created June 10, 2021 10:49
logrotate
vi /etc/logrotate.d/celery
/var/log/celery/*.log {
size=8192k
missingok
notifempty
copytruncate
rotate 10
compress
delaycompress
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk, scan
# Module-level Elasticsearch client (no args: connects to the library default,
# typically localhost:9200) plus batch/index settings for the scan+bulk job.
es = Elasticsearch()
BULK_SIZE = 100      # documents per bulk request
INDEX_NAME = 'reviews'
ES_QUERY = {
"query": {
@palawer
palawer / group_by.sh
Created August 16, 2020 10:31
Group by a column and count
# Group a CSV by one column and count how many rows share each value.
# $2 column number (starts at 1)
# awk extracts the column, sort brings identical values together so that
# `uniq -c` can prefix each distinct value with its occurrence count.
awk -F ',' '{print $2}' file.csv | sort | uniq -c
@palawer
palawer / webserver.sh
Created July 3, 2020 20:34
Python web server
# Serve the current working directory over HTTP on port 8000 (Python 3 stdlib).
python -m http.server 8000
-- PostgreSQL: remove every row from "table" (a placeholder name) and reset
-- its identity/serial sequences back to their start values.
TRUNCATE table RESTART IDENTITY;
### Move
w (jump to start of next word), e (jump to end of word), b (jump back to start of previous word)
### Edit
i (insert before the cursor), o (open a new line below and enter insert mode)
### Copy & Paste
v + d  # visually select, then cut
v + y  # visually select, then copy (yank)
p      # paste after the cursor
" Minimal vimrc: syntax highlighting, 4-space soft tabs, and a few UI niceties.
" (Comments sit on their own lines: trailing comments after :set are unreliable.)
syntax on
" A tab character displays as 4 columns.
set tabstop=4
" Indent operations (>>, autoindent) shift by 4 columns.
set shiftwidth=4
" Insert spaces instead of literal tab characters.
set expandtab
" 'autoindent': a new line copies the indent of the previous one.
set ai
" Show absolute line numbers.
set number
" Highlight every match of the most recent search.
set hlsearch
" Show cursor line/column in the status line.
set ruler
colorscheme peachpuff
from concurrent.futures import ThreadPoolExecutor, as_completed
def get_something(x):
    """Placeholder task for the executor demo: hand the argument back unchanged."""
    result = x
    return result
# Thread-pool size and the demo input sequence fed to the executor.
MAX_WORKERS = 8
iterable = range(20)
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as pool:
futures = [pool.submit(get_something, x) for x in iterable]
for future in as_completed(futures):
# SSH local port forwarding: -L <local_port>:localhost:<remote_port>
# <local_port>:localhost:<remote_port>
# Opens http://localhost:5601 in the browser (macOS `open`) — presumably
# Kibana, whose default port is 5601 — then starts the blocking ssh tunnel.
# The browser tab may need a refresh once the tunnel is actually up.
alias st='open "http://localhost:5601" && ssh server -L 5601:localhost:5601'
from datetime import datetime, timedelta
from concurrent.futures import ThreadPoolExecutor, as_completed
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
from itertools import zip_longest
import csv
# Thread-pool size and Elasticsearch bulk batch size for this script.
MAX_WORKERS = 4
BULK_SIZE = 50