I hereby claim:
- I am tlhunter on github.
- I am tlhunter (https://keybase.io/tlhunter) on keybase.
- I have a public key whose fingerprint is 34FA 158E 563B CEF7 816E DB79 3B71 1C30 7DA2 4AA1
To claim this, I am signing this object:
#!/usr/bin/env node
var amqp = require('amqplib');
var exchange_name = 'pubsub';

amqp.connect('amqp://localhost').then(function(conn) {
  // Close the connection cleanly on Ctrl-C
  process.once('SIGINT', function() { conn.close(); });
  return conn.createChannel().then(function(channel) {
    // Assumed continuation (the original gist is truncated here):
    // declare a fanout exchange so every bound queue receives each message
    return channel.assertExchange(exchange_name, 'fanout', { durable: false });
  });
});
// Line-based prompt over stdin/stdout
var readline = require('readline').createInterface({
  input: process.stdin,
  output: process.stdout
});
// CARD CLASS
var Card = function(face, suite) {
  // this.faces is presumably defined on the prototype; the original gist truncates here
  if (this.faces.indexOf(face) === -1) {
    throw new Error("Invalid Face: " + face);
  }
};
<?php

class MyCoolClass {
    /**
     * @param array $data How does one specify this array needs a 'requiredkey'
     *                    key? (One possible approach is sketched below.)
     */
    public function performAction($data) {
        echo $data['requiredkey'];
    }
}
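/*
 * A hedged aside, not part of the original gist: plain phpdoc has no syntax
 * for required array keys, but static-analysis tools such as PHPStan and
 * Psalm extend docblocks with "array shapes". A minimal sketch follows; the
 * class name here is hypothetical.
 */
class MyCoolClassShaped {
    /**
     * @param array{requiredkey: string} $data Analysers flag callers that
     *                                         omit 'requiredkey'.
     */
    public function performAction(array $data) {
        echo $data['requiredkey'];
    }
}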
// Report basic process health for monitoring
app.get('/health', function(req, res) {
  res.send({
    pid: process.pid,
    memory: process.memoryUsage(),
    uptime: process.uptime(),
    connections: server.connections
  });
});
# Yes, openssl-devel is installed.
$ npm install bcrypt
npm http GET https://registry.npmjs.org/bcrypt
npm http 304 https://registry.npmjs.org/bcrypt
npm http GET https://registry.npmjs.org/bindings/1.0.0
npm http 304 https://registry.npmjs.org/bindings/1.0.0

> [email protected] install /home/thunter/unified-api/node_modules/bcrypt
> node-gyp rebuild
<?php
include("mysql.ssi.php");

# Naming conventions: http://support.eve-online.com/Pages/KB/Article.aspx?id=37
# Sample page format: http://www.example.com/?a=kill_detail&kll_id=1000000

# This is a script I wrote a few years ago to scrape Eve Online kill mails. I ran it on a
# single-core 2.4 GHz machine with 2 GB of RAM for a week and it scraped a couple million
# kills. It stores data in a relational format, but I've since lost the schema file; it can
# easily be reverse engineered from this file though. It scraped sequentially, but it should
# have run multiple requests in parallel for better efficiency (see the sketch below).
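# A hedged sketch, not from the original script: fetching several kill_detail
# pages in parallel with curl_multi. The URL format and the $kll_ids range are
# assumptions based on the comments above.
$kll_ids = range(1000000, 1000009);
$mh = curl_multi_init();
$handles = array();
foreach ($kll_ids as $id) {
    $ch = curl_init("http://www.example.com/?a=kill_detail&kll_id=" . $id);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
    curl_multi_add_handle($mh, $ch);
    $handles[$id] = $ch;
}
do { # run all transfers concurrently
    curl_multi_exec($mh, $running);
    curl_multi_select($mh);
} while ($running > 0);
foreach ($handles as $id => $ch) {
    $html = curl_multi_getcontent($ch); # parse each kill mail here
    curl_multi_remove_handle($mh, $ch);
    curl_close($ch);
}
curl_multi_close($mh);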
<?php
ini_set("error_reporting", E_ALL & ~E_NOTICE);
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Genetic Algorithm : TSP : PHP Implementation by Thomas Hunter</title>
<style>
body {
<?php
/*
SofaDB, a pure PHP CouchDB alternative
Developed by Thomas Hunter
Released under the LGPL
May 1st, 2010
Version 0.0.1

This was something I started as a joke: basically a document storage system
which stores each document on disk as ID.json. It was never heavily tested...
(a minimal sketch of the idea follows below).
*/
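// A hedged sketch of the ID.json approach described above; the function
// names and the data/ directory are assumptions, not SofaDB's actual API.
function sofa_put($id, array $doc) {
    file_put_contents('data/' . $id . '.json', json_encode($doc));
}

function sofa_get($id) {
    return json_decode(file_get_contents('data/' . $id . '.json'), true);
}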
<?php
session_start();
?>
<html>
<head>
<title><?php echo $_SERVER['SERVER_NAME']; ?></title>
<style>
body {
	background-color: #000;
	margin: 10px; padding: 0px;