Created
November 27, 2010 16:29
-
-
Save mizchi/718038 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
"""LDR (livedoor Reader) unread-entry notifier for Growl.

require: Growl simplejson
"""
LIVEDOOR_ID = ""  # livedoor account id -- fill in before use
PASSWORD = ""     # livedoor account password -- fill in before use

import urllib, urllib2, cookielib
import simplejson
import re
import os
import pickle
import Growl

# Register one Growl notifier at import time; notify() reuses it for
# every notification this script emits.
g = Growl.GrowlNotifier(
    applicationName='LDRNotify', notifications=['LDRNotify'])
g.register()
def notify(title, description):
    """Emit a single non-sticky Growl notification via the shared notifier."""
    g.notify(noteType="LDRNotify", title=title,
             description=description, sticky=False)
def psave(fname, obj):
    """Pickle *obj* into *fname*, resolved relative to the script's directory.

    Fixes in this revision: the file handle was leaked (never closed) and
    the file was opened in text mode ("w") -- pickles are binary data, so
    "wb" is required; os.path.abspath guards against os.path.dirname(__file__)
    being "" when the script is run from its own directory.
    """
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    with open(fname, "wb") as fp:
        pickle.dump(obj, fp)
def pload(fname):
    """Unpickle and return the object stored in *fname* (script's directory).

    Fixes in this revision: file handle was leaked and the file was opened
    in text mode -- pickle data must be read with "rb"; os.path.abspath
    guards against os.path.dirname(__file__) being "".
    """
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    with open(fname, "rb") as fp:
        return pickle.load(fp)
# Base URL of the livedoor Reader JSON API.
API = 'http://reader.livedoor.com/api'
# One shared cookie jar + opener so the session cookie obtained by
# login() is sent automatically on every subsequent API request.
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
def login():
    """POST credentials to livedoor; return True on success, False on failure.

    Success here only means the login request completed without a network
    error -- the session cookie lands in the shared cookie jar. The original
    used a bare ``except`` and did ``print False`` on failure (implicitly
    returning None); this version returns False and catches only URL errors,
    and closes the response instead of leaking it.
    """
    url = "http://member.livedoor.com/login/index"
    query = urllib.urlencode(
        [("livedoor_id", LIVEDOOR_ID), ("password", PASSWORD)])
    try:
        res = opener.open(url, query)
        res.read()
        res.close()
        return True
    except urllib2.URLError:
        return False
def get_unread_ids():
    """Fetch the subscriptions that have unread items; None on failure.

    Calls /subs?unread=1 and returns the parsed JSON -- a list of dicts
    with at least "subscribe_id" and "title" keys (per the caller's usage).
    The original returned the *truthy* string "FetchError" on error, which
    the caller would then happily iterate character-by-character; returning
    None keeps the caller's ``if ids:`` guard working as intended. The bare
    ``except`` is narrowed to network and JSON-decode failures.
    """
    url = API + "/subs"
    query = urllib.urlencode([("unread", "1")])
    try:
        res = opener.open(url, query).read()
        return parse_json(res)
    except (urllib2.URLError, ValueError):
        print("IDs Fetch Error ")
        return None
def get_unread_feeds(id):
    """Fetch the unread items of one subscription; None on failure.

    Calls /unread with the given subscribe_id and returns the parsed JSON
    (a dict containing an "items" list, per the caller's usage). As with
    get_unread_ids, the original returned the truthy string "FetchError"
    on error and used a bare ``except``; this returns None and catches
    only network and JSON-decode failures.
    """
    url = API + "/unread"
    query = urllib.urlencode([("subscribe_id", id)])
    try:
        res = opener.open(url, query).read()
        return parse_json(res)
    except (urllib2.URLError, ValueError):
        print("Feed Fetch Error ")
        return None
def set_done(id):
    """Touch all items of the given subscription via /touch_all.

    NOTE(review): the original comment said this marks the feed *unread*,
    but the endpoint name is touch_all -- presumably it marks items read;
    confirm against the LDR API.
    """
    endpoint = API + "/touch_all"
    payload = urllib.urlencode([("subscribe_id", id)])
    response = opener.open(endpoint, payload)
    body = response.read()
    return parse_json(body)
def set_all_done(ids):
    """Call set_done() once for every subscription id in *ids*."""
    for subscribe_id in ids:
        set_done(subscribe_id)
def parse_json(doc):
    """Decode a raw utf-8 API response body into a JSON object.

    Undecodable byte sequences are replaced rather than raising.
    """
    text = unicode(doc, "utf-8", "replace")
    return simplejson.loads(text)
def get_unread_entries():
    """Log in, collect every unread item, and return them newest-first.

    Returns a list of [text, modified_on] pairs where text is the utf-8
    byte string "【feed title】item title: link" and modified_on is the
    item's timestamp (main() uses it as a high-water mark).
    """
    # Retry login until it succeeds -- loops forever on persistent failure.
    while not login():
        print "Login Failure"
    links=[]
    text=""  # NOTE(review): unused local, kept as-is
    ids = get_unread_ids()
    if ids:
        for id in ids:
            # NOTE(review): loop variable shadows the builtin `id`.
            title=id["title"].encode('utf_8') #+ i["body"].encode('utf_8')
            feed = get_unread_feeds(id["subscribe_id"]) #getUnreadFeedBy(i)
            for i in feed["items"]:
                # Build the display byte string; main() later re-splits it
                # with regexes, so the 【...】 and http:// markers matter.
                links.append(
                    [
                    "【"+title+ "】"+i["title"].encode("utf-8")+": "+
                    i["link"].encode("utf-8")
                    ,
                    i["modified_on"] ])
    else:
        print "No Feed "
    # Newest first: sort by (timestamp, text) descending.
    return sorted(links, key=lambda x:(x[1], x[0]),reverse=True)
| # def shortenURL(url) : | |
| # return bitly.shorten(url) | |
def main():
    """Growl-notify every entry newer than the last persisted timestamp."""
    # Load the high-water mark saved by the previous run; a missing or
    # unreadable state file means "first run": notify everything.
    try:
        last_id = pload("ldr.pickle")
    except (IOError, OSError, EOFError):
        last_id = 0
    current_list = get_unread_entries()
    for entry, modified in current_list:
        if modified < last_id:
            # List is sorted newest-first; everything past here was seen.
            break
        sources = re.findall("【.*】", entry)
        urls = re.findall("http://.*$", entry)
        if not sources or not urls:
            # Entry text does not match the expected "【...】...: http://..."
            # shape; the original would have raised IndexError here.
            continue
        source = sources[0]
        url = urls[0]
        # re.escape is required: URLs routinely contain regex
        # metacharacters (?, +, .) that corrupted the original
        # hand-concatenated pattern.
        title = re.sub(re.escape(url) + "|" + re.escape(source), "", entry)
        notify(title=source, description=title)
    if current_list:
        # Newest timestamp becomes the new high-water mark.
        last_id = current_list[0][1]
        psave("ldr.pickle", last_id)
# Script entry point: run once per invocation (e.g. from cron/launchd).
if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment