@OzTamir
Created April 17, 2014 11:40
Get some sweet images from reddit.com/r/EarthPorn (SFW)
import json
import urllib2 as urllib
import sys

def get_urls(limit=5):
    '''Get the urls and titles of the hot submissions in /r/EarthPorn'''
    # This is the API endpoint for hot submissions in /r/EarthPorn
    api_url = 'http://www.reddit.com/r/EarthPorn/hot.json?limit=' + str(limit)
    # Open the endpoint
    json_url = urllib.urlopen(api_url)
    # And load the received JSON into a variable
    text = json.loads(json_url.read())
    # Keep only the relevant part of the received JSON
    links = text['data']['children']
    urls = dict()
    # Collect the urls and titles into a dictionary
    for i, val in enumerate(links):
        url = val['data']['url']
        # I've encountered a few encoding errors - nobody wants those,
        # so fall back to the submission's index if the title won't convert
        try:
            title = str(val['data']['title'])
        except UnicodeEncodeError:
            title = str(i + 1)
        # Make sure it's an actual image (url ends in a three-letter extension like .jpg or .png)
        if url[-4] == '.':
            urls[title] = url
    return urls

def save_pictures(urls):
    '''Download each picture in the urls dict and save it with the proper name and format'''
    for title, url in urls.items():
        # Filename = Title.extension
        print 'Now Downloading: ' + title
        # Once again - goddamn encoding errors; skip anything that won't convert
        try:
            filename = str(title) + '.' + str(url.split('.')[-1])
        except UnicodeEncodeError:
            continue
        with open(filename, 'wb') as f:
            f.write(urllib.urlopen(url).read())
    print 'Done.'

if __name__ == '__main__':
    if len(sys.argv) < 2 or not sys.argv[1].isdigit():
        url_dict = get_urls()
    elif int(sys.argv[1]) > 100:
        print 'Error: Maximum allowed is 100 images. Please try again.'
        sys.exit(1)
    else:
        url_dict = get_urls(int(sys.argv[1]))
    save_pictures(url_dict)
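Run the script with an optional count between 1 and 100; with no argument it downloads the five hottest images into the current directory. The two functions also compose from another script or an interactive session. A minimal sketch, assuming the gist is saved as earthporn.py (the filename is an assumption, it isn't shown above):

# Minimal usage sketch - assumes the gist above is saved as earthporn.py
from earthporn import get_urls, save_pictures

urls = get_urls(limit=10)   # {title: image_url} for the 10 hottest posts
save_pictures(urls)         # writes <title>.<extension> files into the current directory

Because of the __name__ == '__main__' guard, importing the module this way doesn't trigger the command-line behaviour.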