def main(): greader = Reader(GREADER_USERNAME, GREADER_PASSWORD) count = 0 with open("ucd_china.js","w") as ucd_china: for id in (1, 2, 3, 6, 8): feed = 'feed/%s'%quote(TEMPLATE_UCD_CHINA_RSS%id) print feed for i in greader.feed(feed): ucd_china.write(dumps(i)+"\n") count += 1 print count
def main(): greader = Reader(GREADER_USERNAME, GREADER_PASSWORD) count = 0 with open("ucd_china.js", "w") as ucd_china: for id in (1, 2, 3, 6, 8): feed = 'feed/%s' % quote(TEMPLATE_UCD_CHINA_RSS % id) print feed for i in greader.feed(feed): ucd_china.write(dumps(i) + "\n") count += 1 print count
def unread_update(greader=None):
    if greader is None:
        greader = Reader(GREADER_USERNAME, GREADER_PASSWORD)
    # update every feed that still has unread entries, then mark them read
    feeds = greader.unread_feed()
    for feed in feeds:
        try:
            unread_feed_update(greader, feed)
        except:
            traceback.print_exc()
            continue
    greader.mark_as_read()
http://zhaolei.info/rss
http://zhaqiang.com/feed/
http://zhengkun.info/feed/
http://zhongwenbin001.blog.sohu.com/rss
http://zhouzq1975.wordpress.com/feed/
http://zhubin.cc/feed
http://zhu.gd/feed/
http://zhupengfei.com/feed/
http://zqyin.wordpress.com/feed/
http://zwkufo.blog.163.com/rss/
http://zxyun.blog.163.com/rss/"""
s = s.split()
print s

import _env
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

from zkit.google.greader import Reader

reader = Reader('42qu.com', '')
for i in s:
    reader.subscribe(i)
    print i
    sys.stdout.flush()
import _env
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

from zkit.google.greader import Reader

reader = Reader('42qu.com', '')

for i in reader.feed('feed/http://feed.feedzshare.com'):
    print i
    sys.stdout.flush()

for i in reader.feed('feed/http://www.feedzshare.com/s/t/1/rss'):
    print i
    sys.stdout.flush()

for i in reader.feed('feed/http://www.feedzshare.com/s/t/3/rss'):
    print i
    sys.stdout.flush()
def main():
    greader = Reader(GREADER_USERNAME, GREADER_PASSWORD)
    rss_subscribe(greader)
    unread_update(greader)
    rss_tag()
def rss_subscribe(greader=None):
    from zkit.google.findrss import get_rss_link_title_by_url

    # collect newly added feeds (gid=0) and fill in missing rss url / link / name
    rss_list = []
    for i in Rss.where(gid=0):
        url = i.url.strip()
        #print url
        if not all((i.link, i.url, i.name)):
            rss, link, name = get_rss_link_title_by_url(url)
            #print link, name
            if rss:
                i.url = rss
            if link:
                i.link = link
            if not name:
                name = link.split('://', 1)[-1]
            if name:
                i.name = name
            i.save()
        rss_list.append(i)

    if rss_list:
        if greader is None:
            greader = Reader(GREADER_USERNAME, GREADER_PASSWORD)
        for i in rss_list:
            #print i.url
            url = quote(i.url)
            # subscribe on Google Reader; drop the record if the feed is broken
            try:
                greader.subscribe(url)
                i.gid = 1
                i.save()
            except:
                traceback.print_exc()
                print i.url, i.user_id
                i.delete()
            # pull the feed once right away so the new subscription has content
            try:
                #print i.url
                feed = 'feed/%s' % url
                user_id = i.user_id
                duplicator_set_by_user_id(user_id)
                rss_feed_update(greader.feed(feed), i.id, user_id, 1024)
                # greader.mark_as_read(feed)
            except:
                traceback.print_exc()

    # feeds marked for removal (gid<0): unsubscribe them on Google Reader
    for i in Rss.where('gid<0'):
        if greader is None:
            greader = Reader(GREADER_USERNAME, GREADER_PASSWORD)
        try:
            greader.unsubscribe('feed/' + quote(i.url))
        except:
            traceback.print_exc()
            print i.url, i.user_id
            i.delete()
#coding:utf-8
import _env
from zkit.google.greader import Reader
from config import GREADER_USERNAME, GREADER_PASSWORD
from zkit.pprint import pprint

reader = Reader(GREADER_USERNAME, GREADER_PASSWORD)

result = []
for feed in reader.feed("feed/http://book.douban.com/feed/review/book"):
    pprint(feed)
    data = {}
    data['title'] = feed['title']
    data['author'] = feed['author']
    data['content'] = feed['content']
    data['updated'] = feed['updated']
    data['id'] = feed['id']
    data['published'] = feed['published']
    result.append(data)

if __name__ == "__main__":
    pass