Skip to content

Instantly share code, notes, and snippets.

@tkf
Created April 12, 2011 15:41
Show Gist options
  • Select an option

  • Save tkf/915752 to your computer and use it in GitHub Desktop.

Select an option

Save tkf/915752 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
"""
Post list of URLs with tag(s) to Read It Later
"""
import getpass
from readitlater import API
class Config(object):
    """Plain holder for Read It Later account credentials.

    Attributes:
        username: account name used for API calls.
        password: matching password (kept in memory only).
    """

    def __init__(self, username, password):
        # Simply remember both values; the API wrapper reads them later.
        self.username = username
        self.password = password
def setup_ril(apikey=None, username=None, password=None):
    """Build the Read It Later client, prompting for anything missing.

    Any argument left as None is asked for interactively (the password
    via getpass, so it is not echoed). Returns an ``(API, Config)`` pair.
    """
    # Prompt order matches the parameter order: key, user, password.
    key = raw_input('API key: ') if apikey is None else apikey
    user = raw_input('Username: ') if username is None else username
    secret = getpass.getpass() if password is None else password
    return (API(key), Config(user, secret))
def post(rilapi, config, data, tags=None, query_limt=None):
    """Post URL rows to Read It Later, optionally tagging every one.

    Parameters:
        rilapi: object with a ``send(username, password, **kw)`` method.
        config: object carrying ``username`` and ``password``.
        data: list of dicts with ``url`` and ``title`` keys.
        tags: a comma-joined string or an iterable of tag strings; when
            truthy, every URL is also queued for a tag update.
        query_limt: max items per request; None or 0 means "no limit"
            (the CLI documents 0 as no limit).
    """
    new = [dict(url=row['url'], title=row['title']) for row in data]
    if tags:
        if not isinstance(tags, str):
            # Accept any iterable of tags and normalize to "a,b,c".
            tags = ','.join(tags)
        update_tags = [dict(url=row['url'], tags=tags) for row in data]
    else:
        update_tags = None
    # BUG FIX: the original computed len(update_tags) even when it was
    # None (TypeError), and treated 0 as a real limit even though the
    # CLI help documents 0 as "no limit" (the old chunker then silently
    # sent nothing for a 0 limit).
    total = len(new) + (len(update_tags) if update_tags else 0)
    if not query_limt or total <= query_limt:
        rilapi.send(config.username, config.password,
                    new=new, update_tags=update_tags)
    else:
        sned_with_limt(rilapi, config, 'new', new, query_limt)
        if update_tags:
            sned_with_limt(rilapi, config, 'update_tags', update_tags,
                           query_limt)
def sned_with_limt(rilapi, config, key, data, query_limt):
    """Send *data* under keyword *key* in chunks of at most *query_limt*.

    BUG FIX: the original recomputed the slice BEFORE incrementing the
    chunk index, so the first chunk was sent twice. This version steps
    through the list with range() so every chunk is sent exactly once.
    (Function name kept as-is — ``post`` calls it by this spelling.)
    """
    if query_limt < 1:
        # Defensive: mirror the original's do-nothing behavior for a
        # non-positive limit instead of raising from range(..., 0).
        return
    for start in range(0, len(data), query_limt):
        chunk = data[start:start + query_limt]
        rilapi.send(config.username, config.password, **{key: chunk})
def read_urls(urlfile, sep=' '):
    r"""Parse an iterable of text lines into url/title dicts.

    Each line is stripped and split on the FIRST occurrence of *sep*:
    the left piece is the URL, the remainder (spaces and all) is the
    title. Blank or whitespace-only lines are skipped; a line with no
    title yields ``title=None``.

    >>> rows = read_urls(['url_0 title_0', 'url_1', ''])
    >>> [(r['url'], r['title']) for r in rows]
    [('url_0', 'title_0'), ('url_1', None)]
    """
    parsed = []
    for raw in urlfile:
        pieces = raw.strip().split(sep, 1)
        if not pieces[0]:
            # Nothing before the separator means an empty line: skip it.
            continue
        title = pieces[1] if len(pieces) > 1 else None
        parsed.append(dict(url=pieces[0], title=title))
    return parsed
def print_url_data(data):
    """Print each row's url, then its title, then a blank separator line.

    Debug/dry-run aid used by ``main --dry-run-read-url``; expects the
    list-of-dicts shape produced by ``read_urls`` (Python 2 print
    statements, matching the rest of this script).
    """
    for row in data:
        print row['url']
        print row['title']
        # Blank line between entries, matching the doctest in read_urls.
        print
def main():
    """CLI entry point: load URLs from a file or stdin, then post them.

    Options mirror ``setup_ril``/``post`` parameters; ``--dry-run-read-url``
    prints what was parsed and exits without touching the network.
    """
    from optparse import OptionParser
    parser = OptionParser(usage='%prog [options] [URLFILE]',
                          description=__doc__)
    parser.add_option("-t", "--tags", type="str")
    parser.add_option("-a", "--apikey", default=None, type="str")
    parser.add_option("-u", "--username", default=None, type="str")
    parser.add_option("-p", "--password", default=None, type="str")
    parser.add_option("-q", "--query-limit", default=10, type="int",
                      help="item per query. set 0 for no limit. (default: 10)")
    parser.add_option("--dry-run-read-url", default=False, action="store_true",
                      help="just print loaded urls (and titles if exist)")
    (opts, args) = parser.parse_args()
    if len(args) == 0:
        import sys
        # BUG FIX: the original closed sys.stdin after reading; leave
        # the process's stdin alone.
        data = read_urls(sys.stdin)
    else:
        # BUG FIX: the ``file`` builtin is Python-2-only (removed in 3);
        # ``open`` is the documented spelling on both. Close via
        # try/finally so a parse error cannot leak the handle.
        urlfile = open(args[0])
        try:
            data = read_urls(urlfile)
        finally:
            urlfile.close()
    if opts.dry_run_read_url:
        print_url_data(data)
        return
    (rilapi, config) = setup_ril(opts.apikey, opts.username, opts.password)
    post(rilapi, config, data, opts.tags, opts.query_limit)


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment