Skip to content

Instantly share code, notes, and snippets.

View taterbase's full-sized avatar

George Shank taterbase

View GitHub Profile
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script>
<script>
$(function(){
var htmlStr = String($('#js-item-template').html())
$('.js-item').remove();
"use strict";
/**
 * Invokes `fn` once for each integer from 0 up to (but not including)
 * this number. Non-positive receivers result in zero invocations.
 *
 * Generalized to pass the current iteration index to the callback;
 * existing zero-argument callbacks are unaffected (backward compatible).
 *
 * NOTE(review): extending a native prototype is generally discouraged —
 * kept here to preserve existing `n.times(fn)` call sites.
 *
 * @param {function(number): void} fn - called with the iteration index
 */
Number.prototype.times = function ( fn ) {
    var i;
    for ( i = 0 ; i < this ; ++i)
        fn(i);
};
# Monkey-patch: Array#to_h builds a Hash mapping each element's first index
# to the element itself, e.g. [:a, :b].to_h #=> { 0 => :a, 1 => :b }.
# NOTE(review): self.index(val) returns the FIRST index of val, so duplicate
# elements collapse onto one key — presumably inputs are unique; verify.
# NOTE(review): the closing `end`s for `def` and `class` are not visible in
# this chunk — the definition appears truncated by the page preview.
class Array
def to_h
arr = self.collect do | val |
[ self.index(val), val]
end
Hash[arr]
@taterbase
taterbase / gist:4515802
Last active December 11, 2015 00:19
Whale of a comment
/* |\
.-""-._ \ \.--|
/ "-..__) .-'
ಠ_______ಠ /
\'-.__, .__.,'
`'----'._\--'
* Whale whale whale, what have we here?
*/
@taterbase
taterbase / es.sh
Created January 22, 2013 19:47 — forked from sgringwe/es.sh
# Install Elasticsearch 0.20.2 from the prebuilt tarball (Debian/Ubuntu).
# Abort on the first failed command so a bad download or failed install
# does not cascade into the later extract/move steps.
set -e

cd ~
sudo apt-get update
# Elasticsearch needs a JRE; the headless package is enough for a server.
sudo apt-get install openjdk-7-jre-headless -y
# Download the compiled elasticsearch rather than the source.
wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.20.2.tar.gz -O elasticsearch.tar.gz
tar -xf elasticsearch.tar.gz
rm elasticsearch.tar.gz
# Normalize the versioned directory name, then move it into place.
sudo mv elasticsearch-* elasticsearch
sudo mv elasticsearch /usr/local/share
function J = costFunctionJ(X, y, theta)
%COSTFUNCTIONJ Mean squared error cost for linear regression.
%   J = COSTFUNCTIONJ(X, y, theta) computes J = 1/(2m) * sum((X*theta - y).^2).
% X is the "design matrix" containing our training examples (one row each)
% y is the class labels (column vector, one entry per example)
m = size(X,1); % number of training examples
predictions = X*theta; % predictions of hypothesis on all m examples
sqrErrors = (predictions - y) .^2; % squared errors
J= 1/(2*m) * sum(sqrErrors);
function [X_norm, mu, sigma] = featureNormalize(X)
%FEATURENORMALIZE Normalizes the features in X
%   FEATURENORMALIZE(X) returns a normalized version of X where
%   the mean value of each feature is 0 and the standard deviation
%   is 1. This is often a good preprocessing step to do when
%   working with learning algorithms.
% NOTE(review): only the initialization is visible in this chunk — sigma is
% never assigned and X_norm is returned unnormalized; the body that computes
% mu/sigma and rescales X appears to be truncated by the page preview.
% You need to set these values correctly
X_norm = X;
mu = zeros(1, size(X, 2));
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha
% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1); % cost recorded once per iteration
% NOTE(review): the loop body (theta update and J_history bookkeeping) is
% truncated in this chunk — only the loop header is visible below.
for iter = 1:num_iters
{"response": {"status": {"version": "4.2", "code": 0, "message": "Success"}, "songs": [{"message": "query code length is too small", "error": "need codes in query for fingerprint matching"}]}}
// Arduino door-unlock helper built on johnny-five.
var five = require("johnny-five");
var board = new five.Board();
// Digital pins wired to LEDs (kept for use elsewhere in the script).
var leds = [2, 3, 4, 5, 6, 7, 8, 9];

// Once the board reports ready, expose unlockDoor on the interactive REPL;
// the Led on pin 6 is pre-bound as its first argument.
board.on("ready", function () {
  var doorLed = new five.Led(6);
  this.repl.inject({ unlockDoor: unlockDoor.bind(this, doorLed) });
});
function unlockDoor(led) {