Skip to content

Instantly share code, notes, and snippets.

@johnLate
Last active August 29, 2015 14:18
Show Gist options
  • Save johnLate/52df62f60023f5ff9475 to your computer and use it in GitHub Desktop.
#!/usr/bin/python3
"""Fetch and print the introductory summary of a Wikipedia article.

Usage: wikiterm.py <subject words...>

All command-line arguments are joined with spaces to form the article
title, the MediaWiki extracts API is queried, and the plain-text intro
of each returned page is printed.
"""
# based on http://www.reddit.com/r/programming/comments/31fuae/i_made_a_command_line_tool_to_pull_summaries_from/cq1fnq9
from urllib import request, parse
import json
import sys


def build_api_url(subject):
    """Return the MediaWiki API URL for the intro extract of *subject*.

    Parameter order matters only cosmetically; a list of pairs is used so
    the flag-style parameters (exintro, explaintext, continue, redirects)
    can be passed with empty values, as the API expects.
    """
    # http://en.wikipedia.org/w/api.php
    # https://docs.python.org/3.4/library/urllib.parse.html#url-quoting
    return 'http://en.wikipedia.org/w/api.php?' + parse.urlencode([
        ('format', 'json'),
        ('action', 'query'),
        ('prop', 'extracts'),
        ('exintro', ''),       # only the section before the first heading
        ('explaintext', ''),   # plain text instead of HTML
        ('titles', subject),
        ('continue', ''),
        ('redirects', ''),     # follow redirects to the canonical article
    ])


def main():
    """Parse argv, query Wikipedia, and print title + extract per page."""
    if len(sys.argv) < 2:
        print('Usage: {} <subject>'.format(sys.argv[0]))
        sys.exit(1)
    url = build_api_url(' '.join(sys.argv[1:]))
    # A descriptive User-Agent is required by Wikimedia:
    # http://meta.wikimedia.org/wiki/User-Agent_policy
    req = request.Request(url, headers={
        'User-Agent': 'WikiTermPy/0.1',
    })
    with request.urlopen(req) as f:
        data = json.loads(f.read().decode('utf-8'))
    for page in data['query']['pages'].values():
        # A missing page comes back without 'extract' (and with a
        # negative page id), so the KeyError marks "not found".
        try:
            print(page['title'])
            print(page['extract'])
        except KeyError:
            print('No article found.')


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment