Deploying a Node.js app with PM2 & Nginx
As root, run the commands below on the server:
# adduser tomy
To download the latest pandoc .deb release from GitHub, use:

- curl to get the JSON response for the latest release
- grep to find the line containing the file URL
- cut and tr to extract the URL
- wget to download it

curl -s https://api.github.com/repos/jgm/pandoc/releases/latest \
| grep "browser_download_url.*deb" \
| cut -d : -f 2,3 \
| tr -d \" \
| wget -qi -
// This works on all devices/browsers, and uses IndexedDBShim as a final fallback
var indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB || window.shimIndexedDB;

// Open (or create) the database
var open = indexedDB.open("MyDatabase", 1);

// Create the schema
open.onupgradeneeded = function() {
  var db = open.result;
  var store = db.createObjectStore("MyObjectStore", {keyPath: "id"});
};
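The snippet stops once the schema is created; a minimal continuation (a sketch reusing the same database and store names) would handle onsuccess and read/write through a transaction:

open.onsuccess = function() {
  var db = open.result;
  var tx = db.transaction("MyObjectStore", "readwrite");
  var store = tx.objectStore("MyObjectStore");

  store.put({id: 1, name: "example"});   // write a record
  var get = store.get(1);                // read it back
  get.onsuccess = function() { console.log(get.result); };

  tx.oncomplete = function() { db.close(); };
};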
# Return JSON bodies instead of nginx's default HTML error pages
error_page 400 404 405 =200 @40*_json;
location @40*_json {
    default_type application/json;
    return 200 '{"code":"1", "message": "Not Found"}';
}

error_page 500 502 503 504 =200 @50*_json;
location @50*_json {
    default_type application/json;
    # response body assumed here, mirroring the 4xx block above
    return 200 '{"code":"1", "message": "Internal Server Error"}';
}
/* require XLSX */
var XLSX = require('xlsx');

// Convert a JS date (or date string) to an Excel serial date number
function datenum(v, date1904) {
  if(date1904) v += 1462;
  var epoch = Date.parse(v);
  return (epoch - new Date(Date.UTC(1899, 11, 30))) / (24 * 60 * 60 * 1000);
}

function sheet_from_array_of_arrays(data, opts) {
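The sheet_from_array_of_arrays helper is cut off above. In reasonably recent versions of the xlsx package the same job is handled by the built-in XLSX.utils.aoa_to_sheet, so a short sketch (the file name and data are made up) could be:

var XLSX = require('xlsx');

var data = [['Name', 'Value'], ['foo', 1], ['bar', 2]];
var ws = XLSX.utils.aoa_to_sheet(data);          // worksheet from an array of arrays
var wb = XLSX.utils.book_new();                  // empty workbook
XLSX.utils.book_append_sheet(wb, ws, 'Sheet1');  // attach the worksheet
XLSX.writeFile(wb, 'output.xlsx');               // write it to disk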
RDBMS-based job queues have been criticized recently for being unable to handle heavy loads. And they deserve it, to some extent, because the queries used to safely lock a job have been pretty hairy. SELECT FOR UPDATE followed by an UPDATE works fine at first, but then you add more workers, and each is trying to SELECT FOR UPDATE the same row (and maybe throwing NOWAIT in there, then catching the errors and retrying), and things slow down.
On top of that, they have to actually update the row to mark it as locked, so the rest of your workers are sitting there waiting while one of them propagates its lock to disk (and to the disks of however many servers you're replicating to). QueueClassic got some mileage out of the novel idea of randomly picking a row near the front of the queue to lock, but I still can't seem to get more than an extra few hundred jobs per second out of it under heavy load.
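For illustration, here is a rough sketch of that naive locking pattern using node-postgres; the jobs table, its columns, and the query shape are made up for this example and are not the actual implementation of any of these libraries:

const { Pool } = require('pg');
const pool = new Pool();

async function lockNextJob() {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    // Every worker races to lock the same row at the front of the queue;
    // NOWAIT turns contention into an error the caller has to catch and retry.
    const { rows } = await client.query(
      'SELECT id FROM jobs ORDER BY run_at LIMIT 1 FOR UPDATE NOWAIT'
    );
    if (rows.length === 0) { await client.query('ROLLBACK'); return null; }
    // The lock also has to be written to the row (and replicated) before
    // other workers stop contending for this job.
    await client.query('UPDATE jobs SET locked_at = now() WHERE id = $1', [rows[0].id]);
    await client.query('COMMIT');
    return rows[0].id;
  } catch (err) {
    await client.query('ROLLBACK');
    throw err; // e.g. 55P03 lock_not_available raised by NOWAIT
  } finally {
    client.release();
  }
}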
So, many developers have started going straight to dedicated, non-relational queueing systems instead.
Only do this if you understand the consequences: every Node program will be able to bind to ports below 1024.

sudo setcap 'cap_net_bind_service=+ep' /usr/local/bin/node

Important: your node location may vary. Use which node to find it, or use it directly in the command:

sudo setcap 'cap_net_bind_service=+ep' "$(which node)"
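As a quick sanity check (a minimal sketch, assuming the capability was applied to the node binary you actually run), a non-root process should now be able to bind to a privileged port:

var http = require('http');

http.createServer(function(req, res) {
  res.end('ok\n');
}).listen(80, function() {
  console.log('listening on port 80 as uid ' + process.getuid());
});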
https://github.com/djvirgen/virgen-acl Simple and elegant; create your own checks. No middleware?
https://github.com/OptimalBits/node_acl Use as middleware; create your own roles and access. Great choice (see the sketch below).
https://github.com/tschaub/authorized Similar to connect-roles, but a bit more robust? You can create roles and actions, and associate many roles with an action.
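For node_acl, a minimal sketch with the in-memory backend might look like this (the role, resource, and user names are made up for illustration):

var Acl = require('acl');
var acl = new Acl(new Acl.memoryBackend());

// admins can read, update and delete blogs; guests can only read them
acl.allow('admin', 'blogs', ['get', 'put', 'delete']);
acl.allow('guest', 'blogs', 'get');
acl.addUserRoles('joed', 'guest');

acl.isAllowed('joed', 'blogs', 'get', function(err, allowed) {
  console.log(allowed ? 'joed may read blogs' : 'joed may not read blogs');
});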
'use strict';

var directives = angular.module('app', []);

// override the default input to update on blur
directives.directive('input', function () {
  return {
    restrict: 'E',
    require: 'ngModel',
    link: function (scope, elm, attr, ngModelCtrl) {
      if (attr.type === 'radio' || attr.type === 'checkbox') return;
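      // The original snippet is cut off here; what follows is a plausible completion
      // (a sketch, not necessarily the original code): unbind the default events and
      // push the view value into the model only on blur.
      elm.unbind('input').unbind('keydown').unbind('change');
      elm.bind('blur', function () {
        scope.$apply(function () {
          ngModelCtrl.$setViewValue(elm.val());
        });
      });
    }
  };
});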