Skip to content

Instantly share code, notes, and snippets.

//api.js
var request = require('request-json');
var base_url = "http://192.168.1.165:8080/v1/"
var client = request.newClient(base_url);
exports.request = function (url) {
client.get(base_url+url, function (error, response, body) {
return body
})
};
# NOTE(review): this snippet is truncated by the page capture — the general
# (3+ element) branch, the remaining `end`s, and the return value are all
# missing, so the fragment is not runnable as captured.
class Array
# Joins elements Oxford-comma style — presumably "a, b, and c" for 3+
# elements (TODO: confirm against the full gist). One element is returned
# as-is; two are joined with " and ". `delimiter` defaults to ", ".
def join_with_oxford_comma(delimiter=", ")
string = ""
if self.length == 1
string = self.first
elsif self.length == 2
string = self.join(" and ")
else
# Truncated here: the 3+ case is cut off mid-iteration.
self.each_with_index do |elem,i|
if i == 0
# NOTE(review): duplicated capture of the same truncated
# Array#join_with_oxford_comma fragment (an artifact of the page scrape).
# It, too, cuts off mid-iteration with no closing `end`s and is not
# runnable as captured.
class Array
# See the hedged description above: appears to join Oxford-comma style;
# one element returned as-is, two joined with " and ".
def join_with_oxford_comma(delimiter=", ")
string = ""
if self.length == 1
string = self.first
elsif self.length == 2
string = self.join(" and ")
else
# Truncated here: the 3+ case is cut off mid-iteration.
self.each_with_index do |elem,i|
if i == 0
@DGaffney
DGaffney / gist:5920273
Created July 3, 2013 16:39
Invite Codes
http://bit.ly/14NJ3cX
http://bit.ly/1b7HQSp
http://bit.ly/14NJ5S3
http://bit.ly/1b7HV8H
http://bit.ly/14NJa8n
http://bit.ly/1b7HYS3
http://bit.ly/14NJbJx
http://bit.ly/1b7HZW7
http://bit.ly/14NJd40
http://bit.ly/1b7HYBq
require 'open-uri'
require 'nokogiri'

# Scrape every .mp3 link out of the <option> values on the page and download
# each one in parallel with wget.
url = "http://www.enxing.de/audio.html"
result = Nokogiri.parse(open(url))
mp3s = result.search("option").collect{|option| option.attributes["value"].value}.select{|link| link.include?(".mp3")}

# Fixes over the original:
#  * threads are collected and joined, so the script no longer exits (killing
#    in-flight downloads) as soon as the loop finishes spawning;
#  * wget is invoked via system() with an argument *list*, so a hostile link
#    embedded in the page cannot inject shell commands the way the old
#    backtick interpolation (`wget #{mp3}`) allowed.
threads = mp3s.map do |mp3|
  Thread.new { system("wget", mp3) }
end
threads.each(&:join)
# Assign a random page count (0...10000) to every Book.
# NOTE(review): nothing here persists the change — there is no
# book.save / save! call, so only the in-memory objects are mutated and the
# values are discarded when the script ends. Confirm whether persistence
# was intended.
Book.all.each do |book|
book.number_of_pages = rand(10000)
end
require 'open-uri'
require 'json'
# Stdlib-only route: pull reddit's front page straight off its JSON endpoint.
raw_json = open("http://www.reddit.com/.json").read
front_page = JSON.parse(raw_json)
# Or lean on a gem and get fancier...
require 'snoo'
# Spin up a Snoo API client instance.
reddit = Snoo::Client.new
# Controller-style index action: load a page of curations, scoped by an
# optional dataset_id or researcher_id, with limit/offset paging.
# NOTE(review): this fragment is truncated by the page capture — the closing
# `end`s never appear, so it is not runnable as captured.
def index
# Paging defaults; param values arrive as strings, hence the .to_i below.
limit = params[:limit] || 100
offset = params[:offset] || 0
@curations = []
if params[:dataset_id]
@curations = Dataset.first(:id => params[:dataset_id]).curations(:limit => limit.to_i, :offset => offset.to_i)
# NOTE(review): this inner `if` looks like it was meant to be `elsif` — as
# written it nests inside the dataset_id branch, so the dataset result is
# clobbered whenever researcher_id is also present, and the `else` (the
# unscoped default) pairs with this inner `if` instead of the outer one.
# Cannot fix confidently with the tail of the method cut off.
if params[:researcher_id]
@curations = Researcher.first(:id => params[:researcher_id]).curations(:limit => limit.to_i, :offset => offset.to_i)
else
@curations = Curation.all(:archived => false, :limit => limit.to_i, :offset => offset.to_i)
select count(distinct(screen_name)),geo_enabled from users where dataset_id in (200, 201, 202, 203, 204, 205) and twitter_id in (7524932, 8162322, 14274087, 14511951, 15077974, 15894020, 15914634, 16498195, 16529856, 17760781, 17784861, 18200615, 18460864, 18668136, 19035842, 19063141, 20400794, 20555999, 20579679, 21578875, 21587844, 21941984, 22120359, 22205904, 22218117, 22321520, 23376201, 23377569, 23479882, 23908773, 24287708, 24530900, 24966018, 25124859, 25366515, 25626174, 25738421, 25811689, 25822507, 25834094, 26008219, 26150677, 26171023, 26230418, 26273516, 26555800, 26602766, 26728695, 27335898, 27524029, 27570569, 27679698, 27765255, 27921386, 27937482, 27945993, 28369480, 28444084, 29077208, 29150921, 29178221, 29196038, 29209759, 29315398, 29546820, 29620526, 29743099, 29763551, 29881297, 29913428, 29939295, 30027454, 30035057, 30104425, 30431783, 30504450, 31133497, 31156433, 31349514, 31394603, 31513326, 31551422, 32134122, 32381290, 32699839, 32840980, 33050022, 33058177, 33170318, 3329064
Downloading s3://aws/twitter_data/2011-05-10/03-21-07.json.gz 2 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
-> /ebs_download/data/twitter/2011-05-10/03-21-07.json.gz 2 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
Downloading s3://aws/twitter_data/2011-05-10/06-41-07.json.gz 3 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
-> /ebs_download/data/twitter/2011-05-10/06-41-07.json.gz 3 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
Downloading s3://aws/twitter_data/2011-05-10/00-01-07.json.gz 1 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
-> /ebs_download/data/twitter/2011-05-10/00-01-07.json.gz 1 at /ebs_home/dgaffney/SocialFlow-Twitter-Consumer/script/../lib/SocialFlowAPI/S3/Downloader.pm line 67.
Downloadin