def action(handler):
    # Task handler for /admin/prefetch.html: fetch the content of the next
    # unread article after the given date so it is cached before it is read.
    user = users.User(handler.request.get('user'))
    articleDate = datetime.datetime.strptime(handler.request.get('date'), "%Y-%m-%dT%H:%M:%S")
    feedFilter = handler.request.get('feed')
    showFilter = handler.request.get('show')
    query = feeds.Status.all().filter('user = ', user).order('articleDate').filter('articleDate > ', articleDate)
    if showFilter != 'all':
        # unread articles carry datetime.max as their 'read' sentinel
        query.filter('read = ', datetime.datetime.max)
    if feedFilter:
        query.filter('feedName = ', feedFilter)
    stat = query.get()
    sub = feeds.Subscription.all().filter('user = ', user).filter('feedUrl = ', stat.feedUrl).get() if stat else None
    if sub:
        feeds.get_article_content(stat.articleUrl, stat.articleGuid, sub)

def action(context):
    # Redis-backed article view: load the article hash, resolve its feed,
    # render the fetched content, then prefetch the next article in a thread.
    user = context['user']
    subid = context['path_parameters'].get('feed')
    artid = context['path_parameters'].get('read')
    # strip the key prefix from the article id and decode the URL segment
    articleUrl = filters.decode_segment(artid[4:])
    sub = feeds.redis.hgetall(subid)
    art = feeds.redis.hgetall(artid)
    if not art:
        return context['request'].get_response(webob.exc.HTTPNotFound())
    art['subid'] = subid
    art['artid'] = artid
    art['feedName'] = sub['feedName'] if sub else ''
    art['articleDate'] = str(datetime.datetime.utcfromtimestamp(float(art['date'])))
    art['articleUrl'] = articleUrl
    articleGuid = art.get('guid')
    context['content'] = feeds.get_article_content(articleUrl, articleGuid, sub, [])
    context['article'] = feeds.makeUnicode(art)
    # prefetch the next article (skipping this one) unless this request is itself a prefetch
    if not context['parameters'].get('prefetch'):
        params = dict()
        params['feed'] = context['parameters'].get('feed', '')
        params['show'] = context['parameters'].get('show', '')
        params['date'] = art['date']
        params['skip'] = 1
        params['prefetch'] = 1
        cookie = context['request'].headers.get('Cookie', '')
        fetch = threading.Thread(target=prefetch, args=(params, cookie))
        fetch.daemon = True
        fetch.start()

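# The thread target above, prefetch(params, cookie), is defined elsewhere. A
# minimal sketch of what it likely does, assuming the app warms its cache by
# re-requesting its own article endpoint over HTTP with the user's cookie;
# PREFETCH_URL and the use of urllib2 are assumptions, not the project's
# actual helper.
import urllib
import urllib2

PREFETCH_URL = 'http://localhost:8080/feed/'  # hypothetical base URL

def prefetch(params, cookie):
    # Re-issue the read request with the skip/prefetch flags so the next
    # article's content gets fetched and cached before the user asks for it.
    url = PREFETCH_URL + '?' + urllib.urlencode(params)
    request = urllib2.Request(url, headers={'Cookie': cookie})
    try:
        urllib2.urlopen(request, timeout=10).read()
    except Exception:
        pass  # prefetching is best-effort; ignore failures
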
def action(handler):
    # Datastore-backed article view: look up the Status and Subscription for the
    # current user, render the article content, and queue a prefetch of the next one.
    user = users.get_current_user()
    articleUrl = handler.context['path_parameters']['read']
    stat = feeds.Status.all().filter('user = ', user).filter('articleUrl = ', articleUrl).get()
    articleGuid = stat.articleGuid if stat else None
    sub = feeds.Subscription.all().filter('user = ', user).filter('feedUrl = ', stat.feedUrl).get() if stat else None
    handler.context['article'] = stat
    handler.context['content'] = feeds.get_article_content(articleUrl, articleGuid, sub)
    # prefetch the next article
    if stat:
        params = dict()
        params['user'] = user.email()
        params['date'] = stat.articleDate.isoformat()
        feedFilter = handler.request.get('feed')
        if feedFilter:
            params['feed'] = feedFilter
        if handler.request.get('show') == 'all':
            params['show'] = 'all'
        taskqueue.add(url='/admin/prefetch.html', params=params)

def action(handler):
    # Datastore-backed subscription form: edit or delete an existing feed
    # subscription, add a new one, or (with 'test') dry-run the feed fetch.
    sub = None
    user = users.get_current_user()
    lst = [] if handler.request.get('test') else None
    feedUrl = handler.context['path_parameters'].get('feed', None)
    if feedUrl:
        sub = feeds.Subscription.all().filter('user = ', user).filter('feedUrl = ', feedUrl).get()
        if not sub:
            handler.sendError(404)
            return True
    next = handler.request.get('next')
    if not next:
        next = handler.request.headers.get('Referer')
    if not next:
        next = '../' if feedUrl else '../feed/'
    handler.context['next'] = next
    if handler.method == 'POST':
        if handler.request.get('cancel'):
            handler.sendRedirect(next)
            return True
        if handler.request.get('delete'):
            if sub:
                sub.delete()
            handler.sendRedirect(next)
            return True
        feedUrl = handler.request.get('feedUrl')[0:1024].strip()
        feedName = handler.request.get('feedName')[0:16].strip()
        if not feedUrl:
            handler.context.setdefault('errors', []).append('Url is required')
        elif not feedName:
            handler.context.setdefault('errors', []).append('Name is required')
        else:
            if not sub:
                sub = feeds.Subscription()
            sub.user = users.get_current_user()
            sub.feedUrl = feedUrl
            sub.feedName = feedName
            sub.useGuid = True if handler.request.get('useGuid') else False
            sub.prefixRemove = handler.request.get('prefixRemove')[0:1024].strip()
            sub.prefixAdd = handler.request.get('prefixAdd')[0:1024].strip()
            sub.suffixRemove = handler.request.get('suffixRemove')[0:1024].strip()
            sub.suffixAdd = handler.request.get('suffixAdd')[0:1024].strip()
            sub.xpath = handler.request.get('xpath')[0:8096].strip()
            sub.extra = ','.join(handler.request.get_all('extra'))[0:1024].strip()
            if lst is None:
                # normal save: persist the subscription and register the feed
                sub.put()
                feed = feeds.Feed.all().filter('feedUrl = ', feedUrl).get()
                if not feed:
                    feed = feeds.Feed()
                    feed.feedUrl = feedUrl
                    feed.accessed = datetime.datetime.utcnow()
                    feed.put()
                handler.sendRedirect(next)
                return True
            else:
                # test mode: fetch the feed and run one sample entry through the
                # content pipeline, collecting diagnostics in lst
                try:
                    lst.append('fetching and parsing ' + sub.feedUrl)
                    parser = feedparser.parse(sub.feedUrl)
                    if hasattr(parser, 'status'):
                        lst.append('status ' + str(parser.status))
                    elif hasattr(parser, 'bozo_exception'):
                        lst.append(str(parser.bozo_exception))
                    else:
                        lst.append('feed error')
                    if not (hasattr(parser, 'entries') and parser.entries):
                        lst.append('feed has no entries')
                    else:
                        lst.append('processing sample entry...')
                        entry = parser.entries[0]
                        articleUrl = entry.get('link', '')
                        articleGuid = entry.get('guid', articleUrl)
                        feeds.get_article_content(articleUrl, articleGuid, sub, lst)
                except Exception as e:
                    lst.append('exception:')
                    lst.append(str(e))
                handler.context['testout'] = '\n'.join(lst)

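# feeds.get_article_content is defined elsewhere; the prefixRemove/prefixAdd/
# suffixRemove/suffixAdd fields set above suggest it rewrites the article URL
# before fetching, and xpath selects the content to extract. A rough,
# hypothetical sketch of that URL-rewriting step only (the real helper may
# differ):
def rewrite_article_url(articleUrl, sub):
    # strip a configured leading prefix and trailing suffix, then add replacements
    if sub.prefixRemove and articleUrl.startswith(sub.prefixRemove):
        articleUrl = articleUrl[len(sub.prefixRemove):]
    if sub.suffixRemove and articleUrl.endswith(sub.suffixRemove):
        articleUrl = articleUrl[:-len(sub.suffixRemove)]
    return sub.prefixAdd + articleUrl + sub.suffixAdd
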
def action(context):
    # Redis-backed subscription form: edit or delete an existing subscription,
    # add a new one, or (with 'test') dry-run the feed fetch.
    sub = None
    lst = [] if context['parameters'].get('test') else None
    subid = context['path_parameters'].get('feed')
    if subid:
        sub = feeds.redis.hgetall(subid)
        if not sub:
            return context['request'].get_response(webob.exc.HTTPNotFound())
    location = context['parameters'].get('next')
    if not location:
        location = context['request'].headers.get('Referer')
    if not location:
        location = '../' if subid else '../feed/'
    context['next'] = location
    if context['request'].method == 'POST':
        if context['parameters'].get('cancel'):
            return context['request'].get_response(webob.exc.HTTPFound(location=location))
        if context['parameters'].get('delete'):
            if sub:
                feeds.redis.srem(context['user'] + "/subs", subid)
                feeds.redis.delete(subid)
            return context['request'].get_response(webob.exc.HTTPFound(location=location))
        feedUrl = context['parameters'].get('feedUrl')[0:1024].strip()
        feedName = context['parameters'].get('feedName')[0:16].strip()
        if not feedUrl:
            context.setdefault('errors', []).append('Url is required')
        elif not feedName:
            context.setdefault('errors', []).append('Name is required')
        else:
            sub = {}
            sub['user'] = context['user']
            sub['feedUrl'] = feedUrl
            sub['feedName'] = feedName
            sub['useGuid'] = '1' if context['parameters'].get('useGuid') else '0'
            sub['prefixRemove'] = context['parameters'].get('prefixRemove')[0:1024].strip()
            sub['prefixAdd'] = context['parameters'].get('prefixAdd')[0:1024].strip()
            sub['suffixRemove'] = context['parameters'].get('suffixRemove')[0:1024].strip()
            sub['suffixAdd'] = context['parameters'].get('suffixAdd')[0:1024].strip()
            sub['xpath'] = context['parameters'].get('xpath')[0:8096].strip()
            sub['extra'] = ','.join(context['request'].params.getall('extra'))[0:1024].strip()
            if lst is None:
                # normal save: update the hash in place, or allocate a new sub id
                # and register it under the user's subscription set
                if subid:
                    feeds.redis.hmset(subid, sub)
                else:
                    subid = "sub/" + str(feeds.redis.incr("ids/sub"))
                    feeds.redis.hmset(subid, sub)
                    feeds.redis.sadd(context['user'] + "/subs", subid)
                feeds.redis.zadd("feeds", time.time(), feedUrl)
                return context['request'].get_response(webob.exc.HTTPFound(location=location))
            else:
                # test mode: fetch the feed and run one sample entry through the
                # content pipeline, collecting diagnostics in lst
                try:
                    lst.append('fetching and parsing ' + sub['feedUrl'] + '\n')
                    parser = feedparser.parse(sub['feedUrl'])
                    if hasattr(parser, 'status'):
                        lst.append('status ' + str(parser.status) + '\n')
                    elif hasattr(parser, 'bozo_exception'):
                        lst.append(str(parser.bozo_exception) + '\n')
                    else:
                        lst.append('feed error\n')
                    if not (hasattr(parser, 'entries') and parser.entries):
                        lst.append('feed has no entries\n')
                    else:
                        lst.append('processing sample entry...\n')
                        entry = parser.entries[0]
                        articleUrl = entry.get('link', '')
                        articleGuid = entry.get('guid', articleUrl)
                        feeds.get_article_content(articleUrl, articleGuid, sub, lst)
                except Exception as e:
                    lst.append('exception:\n')
                    lst.append(str(e))
                    lst.append('\n')
                    print traceback.format_exc()
                context['testout'] = ''.join(lst)

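# The Redis layout implied above: each subscription is a hash at "sub/<n>",
# "<user>/subs" is a set of those keys, and "feeds" is a sorted set of feed
# URLs scored by the time they were last saved. A minimal read-back sketch
# under those assumptions (feeds.redis is the same client used above);
# list_subscriptions is illustrative, not part of the app:
def list_subscriptions(user):
    subs = []
    for subid in feeds.redis.smembers(user + "/subs"):
        sub = feeds.redis.hgetall(subid)
        if sub:
            sub['subid'] = subid
            subs.append(sub)
    # sort by display name for a stable listing
    return sorted(subs, key=lambda s: s.get('feedName', ''))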