http://www.xminder.com/number.check.php?number=5551234567
http://www.xminder.com/number.check.php?number=<10digitnumber>
'nuff said
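For illustration, here is a minimal, hypothetical Python sketch of querying that endpoint; the response format is not documented in this note, so it simply prints whatever the server returns.

# Hypothetical sketch: hit the xminder number-check endpoint with a
# 10-digit number and print the raw response body.
import urllib.request

number = "5551234567"  # any 10-digit number
url = "http://www.xminder.com/number.check.php?number=" + number

with urllib.request.urlopen(url) as resp:
    print(resp.read().decode("utf-8"))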
Also, npm module for node.js:
#!/usr/bin/env ruby
require 'date'
require 'rubygems'
require 'libnotify'
require 'net/http'
require 'rexml/document'
require 'xmlsimple'

# Run the given block every `time` seconds in a background thread.
# (The loop body below is an assumed minimal completion; the original
# snippet is truncated after `loop do`.)
def every( time )
  Thread.new {
    loop do
      yield
      sleep time
    end
  }
end
<!DOCTYPE html>
<meta charset="utf-8">
<style>
  body {
    margin: 0;
    background: #000;
  }
</style>
var typify = require("typify");

// Report a mismatch whenever typify.check() does not return the expected value.
function checklog(type, v, expected) {
  var result = typify.check(type, v);
  if (result !== expected) {
    console.error(type, v, result, expected);
  }
}

var aNumber = 123;
// (Assumed usage; the original snippet is truncated here.)
checklog("number", aNumber, true);
This gist assumes:
<?php
class Keygen {
    private $serial;

    // Build a random string of $length characters from the allowed set.
    private function randChar($length = 8) {
        $characters = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ';
        $string = '';
        for ($p = 0; $p < $length; $p++) {
            // (Assumed completion; the original snippet is truncated here.)
            $string .= $characters[rand(0, strlen($characters) - 1)];
        }
        return $string;
    }
}
## strike.probe.r
## software for the analysis of strike data obtained from mantis shrimp
## https://gist.github.com/4180275
library(R.matlab)
library(lattice)
library(timsac)
library(ggplot2)

working.dir <- "~/Dropbox/Analysis2/R_programming/strike_probe"
function ForceData = autorec(duration, strikeThreshold, rewardThreshold)
%% DESCRIPTION
% This program automatically records force data by detecting the rising
% phase of the voltage change, and it produces a digital output depending
% on whether the peak of the voltage is above a threshold. The output
% condition can be customized.
%% HOW TO USE
% Terminating this program is tricky. Please press "Ctrl-C" to quit the
% program and, after that, run "daqreset" at the MATLAB prompt.
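The comments above describe the detection logic only in prose. As a rough, hypothetical illustration (not the original MATLAB implementation), the same idea in Python might look like the sketch below, with strike_threshold and reward_threshold mirroring the function arguments and a made-up sample trace.

# Rough sketch of the described logic: detect the rising phase of a voltage
# trace via a threshold crossing, then report whether the peak of that event
# exceeds a second threshold.
def detect_strike(voltage, strike_threshold, reward_threshold):
    """Return (event_detected, peak, reward) for a list of voltage samples."""
    for i in range(1, len(voltage)):
        # Rising phase: the signal crosses strike_threshold from below.
        if voltage[i - 1] < strike_threshold <= voltage[i]:
            peak = max(voltage[i:])
            return True, peak, peak >= reward_threshold
    return False, None, False

trace = [0.0, 0.1, 0.3, 1.2, 2.4, 1.8, 0.5]   # fabricated example samples
print(detect_strike(trace, strike_threshold=1.0, reward_threshold=2.0))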
On July 22, GitHub announced the 3rd Annual GitHub Data Challenge, presenting the multiple sources of data it makes available.
This sounded like a good opportunity to take that data and import it into Neo4j, in order to have some fun analyzing data that fits naturally in a graph.
As I work mainly offline or behind military proxies that do not permit me to use the REST API, I decided to go for the GitHub Archive available here; from it you can download JSON files representing GitHub events on a daily or hourly basis.
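As a rough sketch of that first step, the snippet below downloads a single hourly file and tallies event types before any Neo4j import. It assumes the archive serves one gzipped, newline-delimited JSON file per hour at data.githubarchive.org; that URL pattern is my assumption, not something stated above.

# Hypothetical sketch: fetch one hourly GitHub Archive file and count event
# types. The URL pattern (data.githubarchive.org/YYYY-MM-DD-H.json.gz) is an
# assumption; adjust it to whatever the archive documents.
import gzip
import json
import urllib.request
from collections import Counter

url = "http://data.githubarchive.org/2014-07-22-15.json.gz"  # hour 15 UTC

with urllib.request.urlopen(url) as resp:
    raw = gzip.decompress(resp.read())

# Each line of the decompressed file is one JSON-encoded GitHub event.
counts = Counter(json.loads(line)["type"] for line in raw.splitlines() if line)
print(counts.most_common(10))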