Hi there!
The docker cheat sheet has moved to a GitHub project under https://github.com/wsargent/docker-cheat-sheet.
Please click on the link above to go to the cheat sheet.
var spawn = require("child_process").spawn;
var Q = require("q");

/**
 * Wrap executing a command in a promise
 * @param {string} command Command to execute.
 * @param {Array<string>} args Arguments to the command.
 * @param {string} cwd The working directory to run the command in.
 * @return {Promise} A promise for the completion of the command.
 */
def _autofmthelper(name, fmt, postprocess=None):
    def fmtfunc(self):
        result = fmt.format(self=self)
        if postprocess is not None:
            result = postprocess(self, result)
        return result
    fmtfunc.__name__ = name
    return fmtfunc


def autounicode(fmt):
RDBMS-based job queues have been criticized recently for being unable to handle heavy loads. And they deserve it, to some extent, because the queries used to safely lock a job have been pretty hairy. SELECT FOR UPDATE followed by an UPDATE works fine at first, but then you add more workers, and each is trying to SELECT FOR UPDATE the same row (and maybe throwing NOWAIT in there, then catching the errors and retrying), and things slow down.
On top of that, they have to actually update the row to mark it as locked, so the rest of your workers are sitting there waiting while one of them propagates its lock to disk (and to the disks of however many servers you're replicating to). QueueClassic got some mileage out of the novel idea of randomly picking a row near the front of the queue to lock, but I still can't seem to get more than an extra few hundred jobs per second out of it under heavy load.
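To make that concrete, here's a rough sketch of the locking dance being described; the jobs table and its columns are hypothetical, not taken from any particular library:

-- each idle worker tries to claim the first available job (hypothetical schema)
BEGIN;
SELECT id FROM jobs
  WHERE locked_at IS NULL
  ORDER BY run_at
  LIMIT 1
  FOR UPDATE NOWAIT;  -- with NOWAIT this errors out if another worker already holds the row

-- the row then has to be rewritten (and replicated) just to mark it as taken
UPDATE jobs SET locked_at = now() WHERE id = :claimed_id;
COMMIT;

Every worker that loses the race either blocks on the lock or, with NOWAIT, has to catch the error and retry, which is exactly where the slowdown comes from.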
So, many developers have started going straight t
# check if job exists
curl -XGET 'http://jenkins/checkJobName?value=yourJobFolderName' --user user.name:YourAPIToken

# with folder plugin
curl -s -XPOST 'http://jenkins/job/FolderName/createItem?name=yourJobName' --data-binary @config.xml -H "Content-Type:text/xml" --user user.name:YourAPIToken

# without folder plugin
curl -s -XPOST 'http://jenkins/createItem?name=yourJobName' --data-binary @config.xml -H "Content-Type:text/xml" --user user.name:YourAPIToken

# create folder
var pkg = require("./package.json")
  , rimraf = require("rimraf")
  , gulp = require("gulp")
  , gutil = require("gulp-util")
  , filter = require("gulp-filter")
  , plumber = require("gulp-plumber")
  , concat = require("gulp-concat");

gulp.task("clean", function() {
library(ggplot2)
library(maps)
library(mapproj)

###############################################################################
# Step 1: Get data from Foursquare
# If you already have it, then great :) Otherwise, you can use RPI. The source
# is listed below, and there are instructions for getting keys in the readme.
# RPI: https://github.com/johnschrom/RPI
WITH table_scans as (
    SELECT relid,
           tables.idx_scan + tables.seq_scan as all_scans,
           ( tables.n_tup_ins + tables.n_tup_upd + tables.n_tup_del ) as writes,
           pg_relation_size(relid) as table_size
    FROM pg_stat_user_tables as tables
),
all_writes as (
    SELECT sum(writes) as total_writes
    FROM table_scans
$editor_accent: #00abe6;

// styles for heading_extended
/* ------------------------------------------------------- */
.heading_level_selected {
    color: white;
    background: $editor_accent;
}

.heading_level_not_selected {
    color: $editor_accent;
# ag <https://github.com/ggreer/the_silver_searcher>
# usage: ag-replace.sh [search] [replace]
# caveats: will choke if either argument contains a forward slash
# notes: will back up changed files to *.bak files
ag -0 -l "$1" | xargs -0 perl -pi.bak -e "s/$1/$2/g"

# or if you prefer sed's regex syntax:
ag -0 -l "$1" | xargs -0 sed -ri.bak -e "s/$1/$2/g"