redis-cli -n 4 KEYS "mapper-*" | xargs redis-cli -n 4 DEL
# BACKUP
github-backup -t ghp_XXX -O ampeersenergy --repositories --wikis --private -o .
$ git commit -m "Something terribly misguided"
$ git reset --soft HEAD~
<< edit files as necessary >>
$ git add ...
$ git commit -c ORIG_HEAD
git branch -d <branchName>
git push origin --delete <branchName>
git remote prune origin --dry-run
git checkout master
git reset --hard e3f1e37
git push --force origin master
# Then to prove it (it won't print any diff)
git diff master..origin/master
git checkout --orphan newBranch
git add -A # Add all files and commit them
git commit
git branch -D master # Deletes the master branch
git branch -m master # Rename the current branch to master
git push -f origin master # Force push master branch to github
git gc --aggressive --prune=all # remove the old files
- set multiple origins: http://stackoverflow.com/questions/14290113/git-pushing-code-to-two-remotes
# delete remote branches batch
pattern/v01
pattern/v02
git branch -r | awk -F/ '/\/PATTERN/{print $2"/"$3}' | xargs -I {} git push origin :{}
pattern-v01
pattern-v02
git branch -r | awk -F/ '/\/PATTERN/{print $2}' | xargs -I {} git push origin :{}
# list all git repos in dir
find . -name .git -type d -prune
python profiling
1a. create log: python -m cProfile -o log.cprof -s cumtime main.py
2a. open log: pyprof2calltree -k -i log.cprof
1b. create log: python -m cProfile -o program.prof main.py
2b. open log with: snakeviz program.prof
rm all pyc in project
find . -name "*.pyc" -exec rm -rf {} \;
# change all file extensions from ext1 to ext2
find . -name "*.ext1" -exec bash -c 'mv "$1" "${1%.ext1}".ext2' - '{}' \;
# append / prepend
sed -e 's/^/prefix/' file.txt
sed 's/$/suffix/' file.txt
#copy folder recursive without node_modules
rsync -rv --progress --exclude=node_modules ~/SOURCE ~/TARGET
ffmpeg -user_agent "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/601.7.8 (KHTML, like Gecko) Version/9.1.3 Safari/537.86.7" -i https://<URL>/master.m3u8<TOKEN_IF_BEHIND_LOGIN> -c copy hls_stream.mkv
select all except STRING: ^(?!.*STRING).*$
select all empty lines: ^(?:[\t ]*(?:\r?\n|\r))+
select all leading whitespace: ^\s+
select all newlines: [\r\n]
select all from STR1 to STR2: \STR1.*?()STR2
select all where [ not followed by ' or r: \[(?!\'|r)
FIND: (\.[0-9]{1,1})[^0-9]
REPLACE: $10' # $1 = content from capturegroup, 0' added stuff
current:
'4-141004': '12.31',
'5-110003': '22.4', # .4' will be selected
'5-1F0001': '13.2', # .2' will be selected
should:
'4-141004': '12.31',
'5-110003': '22.40',
'5-1F0001': '13.20',
rm stopwords from corpus
sed -f <(sed 's/.*/s|\\\<&\\\>||g/' stoplist.txt) corpus.txt >> out.txt
find missing symbol: c++filt SYMBOL
latex focus: text.tex.latex
# downloads all pl and pm files into current dir from remote
rsync -rv -e "ssh -p PORT" --include='*.pl' --include='*.pm' --exclude='*.out' --exclude='*.log' USER@IP:/PATH/PATH2 .
# dump all db in file
mysqldump -u root -p --all-databases > all_databases.sql
# import all dbs
mysql -u root -p < database_dump.sql
# import one database from an all-databases dump
mysql --one-database database_name < all_databases.sql
sed -n '/^-- Current Database: `DBNAME`/,/^-- Current Database: `/p' all_databases.sql > DBNAME.sql
# MATERIALIZED VIEW FAKE TRIGGER
-- Keep SagGwsCatItemEntity_map in sync with SagGwsCatItemEntity
-- ("fake materialized view"): the map stores MANUFACTURERITEMNUMBER
-- with all spaces stripped.
DELIMITER $$
DROP TRIGGER IF EXISTS SagGwsCatItemEntity_after_insert$$
CREATE DEFINER=`sagSecSql`@`%` TRIGGER `SagGwsCatItemEntity_after_insert` AFTER INSERT ON `SagGwsCatItemEntity` FOR EACH ROW BEGIN
INSERT INTO SagGwsCatItemEntity_map (CATALOGITEMNUMBER,MANUFACTURER,MANUFACTURERITEMNUMBER) SELECT CATALOGITEMNUMBER,MANUFACTURER,REPLACE(MANUFACTURERITEMNUMBER, " ", "") FROM SagGwsCatItemEntity WHERE CATALOGITEMNUMBER = NEW.CATALOGITEMNUMBER;
END $$
DROP TRIGGER IF EXISTS SagGwsCatItemEntity_after_update$$
CREATE DEFINER=`sagSecSql`@`%` TRIGGER `SagGwsCatItemEntity_after_update` AFTER UPDATE ON `SagGwsCatItemEntity` FOR EACH ROW BEGIN
-- Refresh only the updated row; without the NEW.CATALOGITEMNUMBER
-- restriction this rewrote the whole map table on every single update.
UPDATE SagGwsCatItemEntity_map a, SagGwsCatItemEntity b SET a.MANUFACTURER = b.MANUFACTURER, a.MANUFACTURERITEMNUMBER = REPLACE(b.MANUFACTURERITEMNUMBER, " ", "") WHERE a.CATALOGITEMNUMBER = b.CATALOGITEMNUMBER AND b.CATALOGITEMNUMBER = NEW.CATALOGITEMNUMBER;
END $$
DROP TRIGGER IF EXISTS SagGwsCatItemEntity_after_delete$$
CREATE DEFINER=`sagSecSql`@`%` TRIGGER `SagGwsCatItemEntity_after_delete` AFTER DELETE ON `SagGwsCatItemEntity` FOR EACH ROW BEGIN
DELETE FROM SagGwsCatItemEntity_map WHERE CATALOGITEMNUMBER = OLD.CATALOGITEMNUMBER;
END $$
DELIMITER ;
CREATE EVENT toogleRoleA
ON SCHEDULE AT CURRENT_TIMESTAMP + INTERVAL 1 MINUTE
DO
INSERT INTO role (rolename, restriction) VALUES('tmpRole', 'tmpRestriction');
// at my.cnf
# Default Homebrew MySQL server config
[mysqld]
# Only allow connections from localhost
# bind-address = 127.0.0.1
bind-address = 0.0.0.0
default_authentication_plugin=mysql_native_password
mysql -u root -p
ALTER USER'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'PW';
// diff fn
const arrA = [];
const arrB = [];

/**
 * Returns the elements of `left` that are not in `right` (set
 * difference, deduplicated, first-occurrence order preserved).
 * Defaults to the module-level arrA/arrB so existing `diff()`
 * callers keep working unchanged.
 * @param {Array} [left=arrA]  candidate elements
 * @param {Array} [right=arrB] elements to exclude
 * @returns {Array} left \ right
 */
function diff(left = arrA, right = arrB) {
  const exclude = new Set(right);
  return [...new Set(left)].filter((x) => !exclude.has(x));
}
// single write
const w = (num) => {
  console.log('write: ' + num);
};

let timer;
const interval = 200;

/**
 * Debounced write: each call cancels the previous pending write, so
 * only the last call within `interval` ms actually reaches w().
 */
function req(which) {
  clearTimeout(timer);
  timer = setTimeout(() => {
    w(which);
  }, interval);
}

// Demo: only "write: 4" is ever logged — the first three timers are
// cancelled before they fire.
[1, 2, 3, 4].forEach(req);
Search with tuples, e.g. for constraint violations
SELECT *
FROM ZRM_ZW2
WHERE (ZRM_ZRID, ZW2_UT1) IN (
SELECT ZRM_ZRID, TO_DATE(TO_CHAR(ZW2_UT1, 'YYYY-MM-DD' ) || ' 00:00:0', 'YYYY-MM-DD HH24:MI:SS')
FROM ZRM_ZW2
WHERE TO_CHAR(ZW2_UT1, 'YYYY-MM-DD HH24:MI:SS') LIKE '%04:59%'
);
// One row per workflow instance to repair: the winstancestates.id plus
// the OPC name/id that buildUpdate() writes into its stored state JSON.
const mapping = [
{ instanceId: 415, opcName: 'OPC323', opcId: 369 },
{ instanceId: 416, opcName: 'OPC326', opcId: 372 },
{ instanceId: 417, opcName: 'OPC328', opcId: 374 },
{ instanceId: 418, opcName: 'OPC331', opcId: 377 },
{ instanceId: 419, opcName: 'OPC333', opcId: 379 },
{ instanceId: 420, opcName: 'OPC336', opcId: 382 },
{ instanceId: 421, opcName: 'OPC338', opcId: 384 },
{ instanceId: 422, opcName: 'OPC340', opcId: 386 },
{ instanceId: 423, opcName: 'OPC342', opcId: 388 },
{ instanceId: 425, opcName: 'OPC345', opcId: 391 },
];
// SELECT "state"->'history'->'context'->'createAccounting'
// FROM winstancestates
// WHERE id = 415;
/**
 * Renders the SQL that points one workflow instance's stored
 * createAccounting reference (id + name) at the given OPC, covering
 * every JSON path where the state duplicates that reference.
 * @param {{instanceId: number, opcName: string, opcId: number}} row
 * @returns {string} eight UPDATE statements for winstancestates
 */
const buildUpdate = ({ instanceId, opcName, opcId }) => {
  const idValue = `to_json(${Number(opcId)})::JSONB`;
  const nameValue = `'"${opcName}"'`;
  // Every location in the state JSON that stores the reference,
  // paired with the replacement value for that slot.
  const targets = [
    ['{context, createAccounting, id}', idValue],
    ['{context, createAccounting, name}', nameValue],
    ['{history, event, data, id}', idValue],
    ['{history, event, data, name}', nameValue],
    ['{history, _event, data, data, id}', idValue],
    ['{history, _event, data, data, name}', nameValue],
    ['{history, context, createAccounting, id}', idValue],
    ['{history, context, createAccounting, name}', nameValue],
  ];
  const statements = targets.map(
    ([path, value]) => `UPDATE winstancestates
SET
"state" = jsonb_set(("state")::JSONB, '${path}', ${value})
WHERE id = ${Number(instanceId)};`
  );
  return `\n${statements.join('\n')}\n`;
};
const { writeFile } = require('fs/promises');

// Render one batch of UPDATE statements per mapping row and dump them
// into wfm_fix.sql in the current working directory.
async function _() {
  await writeFile(
    `${process.cwd()}/wfm_fix.sql`,
    mapping.map(buildUpdate).join('\n\n'),
    'utf-8'
  );
}

// Don't leave the promise floating: surface failures and set a
// non-zero exit code so the caller can tell the dump did not happen.
_().catch((err) => {
  console.error('writing wfm_fix.sql failed:', err);
  process.exitCode = 1;
});