def item_list(id):
    """Render all items from a given feed"""
    try:
        items = fc.get_items_from_feed(id)
        if items:
            log.debug(items[0])
    except:
        log.error(tb())
        abort(500, "Error accessing items for feed")
    return {'items': items, 'title': id}
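# Hypothetical wiring (a sketch, not taken from the project): handlers like
# item_list return dicts, which suggests Bottle's view plugin renders them
# through a template; the route path and template name below are assumptions.
from bottle import route, view

route('/feeds/<id>/items', callback=view('items')(item_list))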
def add_feed(self, url, site_url=None, title=None, group=None):
    """Add a feed to the database"""
    try:
        # reuse an existing feed record if the URL is already known
        f = Feed.get(Feed.url == url)
    except Feed.DoesNotExist:
        f = Feed.create(url=url, title=title, site_url=site_url)
    except:
        log.error(tb())
        f = None
    return f
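# Feed.get/Feed.create are Peewee model calls. A minimal model sketch
# consistent with the fields used above -- the field types and the backing
# database are assumptions, not the project's actual schema.
from peewee import Model, CharField, SqliteDatabase

db = SqliteDatabase('feeds.db')  # assumed backing store

class Feed(Model):
    url = CharField(unique=True)   # looked up in add_feed
    title = CharField(null=True)
    site_url = CharField(null=True)

    class Meta:
        database = db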
def index():
    """Render a feed index"""
    try:
        feeds = fc.get_feeds_with_counts()
        if feeds:
            log.debug(feeds[0])
    except:
        log.error(tb())
        abort(500, "Error accessing feed data")
    headers = [
        {'label': 'Feed', 'field': 'title'},
        {'field': 'enabled', 'icon': 'icon-ok'},
        {'label': 'URL', 'field': 'url'},
        {'label': 'Site URL', 'field': 'site_url'},
        {'label': 'Modified', 'field': 'last_modified'},
        {'label': 'Checked', 'field': 'last_checked'},
        {'label': 'Status', 'field': 'last_status'},
        {'label': 'Items', 'field': 'item_count'},
    ]
    return {'headers': headers, 'feeds': feeds, 'title': 'feeds'}
def expand_links(links):
    """Try to expand each link without locking the database"""
    result = {}
    for l in links:
        (schema, netloc, path, params, query, fragment) = urlparse.urlparse(l)
        if netloc and schema in ['http', 'https']:
            try:
                # cached expansion?
                link = Link.get(url=l)
                result[l] = link.expanded_url
            except Link.DoesNotExist:
                expanded_url = expand(l, timeout=settings.fetcher.link_timeout)
                try:
                    Link.create(url=l, expanded_url=expanded_url, when=time.time())
                except:
                    log.error(tb())
                result[l] = expanded_url
        else:
            result[l] = l
    return result
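# expand() is defined elsewhere; one plausible implementation resolves
# redirects with a HEAD request. The signature matches the call above, the
# body is an assumption, not the project's code.
import urllib2

def expand(url, timeout=5):
    """Follow redirects and return the final URL (sketch)."""
    class _Head(urllib2.Request):
        def get_method(self):
            return 'HEAD'
    try:
        res = urllib2.urlopen(_Head(url), timeout=timeout)
        return res.geturl()  # urllib2 follows redirects by default
    except Exception:
        return url  # fall back to the original link on any failure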
def agnostic_shortener(url):
    """A more flexible URL shortener"""
    services = {
        'tinyurl.com': '/api-create.php?url=',
        'is.gd': '/api.php?longurl=',
        #'api.bit.ly': "http://api.bit.ly/shorten?version=2.0.1&%s&format=text&longUrl=" % BITLY_AUTH,
        'api.tr.im': '/api/trim_simple?url='
    }
    for shortener in services.keys():
        try:
            res = fetch('http://' + shortener + services[shortener] + urllib.quote(url))
            shorturl = res['data'].strip()
            # only accept a reply that looks like a URL on the service itself
            if ("Error" not in shorturl) and ("http://" + shortener in shorturl):
                return shorturl
        except:
            log.warn("%s: %s" % (tb(), url))
    return url
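# fetch() is defined elsewhere; a minimal stand-in that matches the
# res['data'] access above -- an assumption, not the project's fetcher.
import urllib2

def fetch(url, timeout=10):
    """Return a dict with the response body, mimicking the interface used above."""
    res = urllib2.urlopen(url, timeout=timeout)
    return {'data': res.read(), 'status': res.getcode()}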
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2012, Rui Carmo
Description: Shared configuration data
License: MIT (see LICENSE.md for details)
"""

import logging
import logging.config
import os
import platform
import sys

from utils import get_config, path_for, tb

try:
    settings
except NameError:
    for host in [platform.node(), 'default']:
        try:
            settings = get_config(path_for(os.path.join('etc', '%s.json' % host)))
        except IOError:
            continue
        except Exception:
            trace = tb()
            if sys.stderr.isatty():
                print >> sys.stderr, ("Error while loading %(host)s.json: %(trace)s" % locals())
            else:
                # log is not defined yet, so go through the root logger directly
                logging.getLogger().error("Error while loading %(host)s.json: %(trace)s" % locals())
            sys.exit(2)
        logging.config.dictConfig(dict(settings.logging))
        log = logging.getLogger()
        log.info("Configuration for %s loaded." % host)
        break
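# The loader above looks for etc/<hostname>.json, falling back to
# etc/default.json. A minimal example of what such a file might contain --
# the "fetcher" keys are assumptions inferred from the settings.fetcher.*
# lookups elsewhere; only the "logging" dictConfig section is required by
# the code above:
#
#   {
#       "fetcher": {
#           "link_timeout": 10,
#           "max_history": 2592000
#       },
#       "logging": {
#           "version": 1,
#           "handlers": {
#               "console": {"class": "logging.StreamHandler"}
#           },
#           "root": {"level": "INFO", "handlers": ["console"]}
#       }
#   }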
for entry in result.entries:
    when = get_entry_timestamp(entry)
    # skip ancient feed items
    if (now - when) > settings.fetcher.max_history:
        continue
    guid = get_entry_id(entry)
    try:
        item = Item.get(guid=guid)
        # if the item is already in the database with the same timestamp, skip it
        # TODO: handle item content updates - potentially very expensive, we'll see later on
        continue
    except Item.DoesNotExist:
        pass
    except Exception:
        log.error(tb())
    html = get_entry_content(entry)
    # stack these for committing to the database below
    entries.append({
        'guid': guid,
        'feed': feed,
        'title': get_entry_title(entry),
        'author': get_entry_author(entry, result.feed),
        'html': html,
        'url': entry.link,
        'tags': get_entry_tags(entry),
        'when': when
    })
if not entries:
    return
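# get_entry_timestamp and friends are helpers over feedparser entries; a
# sketch of the timestamp helper, under the assumption that `when` is a UNIX
# timestamp compared against time.time() above (the fallback is a guess):
import calendar
import time

def get_entry_timestamp(entry):
    """Best-effort UNIX timestamp for a feedparser entry (sketch)."""
    for field in ('published_parsed', 'updated_parsed', 'created_parsed'):
        value = entry.get(field)
        if value:
            return calendar.timegm(value)  # feedparser struct_times are UTC
    return time.time()  # no date in the feed: treat the item as current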