Code example #1
File: base.py  Project: thakadu/Abraxas
 def __call__(self, environ, start_response):
     """Invoke the Controller"""
     # WSGIController.__call__ dispatches to the Controller method
     # the request is routed to. This routing information is
     # available in environ['pylons.routes_dict']
     try:
         return WSGIController.__call__(self, environ, start_response)
     finally:
         Session.remove()
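In a typical Pylons layout this __call__ sits on a BaseController(WSGIController) subclass in base.py. A minimal sketch of a controller dispatched through it (the LinksController name and its action are invented for illustration):

class LinksController(BaseController):
    def index(self):
        # Routes places the matched controller/action in environ['pylons.routes_dict'];
        # BaseController.__call__ above dispatches here and removes the Session afterwards.
        return render('/links_960.mako')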
Code example #2
File: entry.py  Project: thakadu/Abraxas
 def tag(self, tags):
     """Given a single tag or a list of tags adds them to the entry"""
     if isinstance(tags, basestring):  # accept both str and unicode as a single tag
         tags = [tags]
     for tag in tags:
         tag_ = Tag()
         tag_.keyword = tag
         tag_.lower = tag.lower()
         tag_.entry_id = self.id
         Session.add(tag_)
     Session.commit()
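A minimal usage sketch, assuming an Entry has already been loaded through the Session (the tag values are invented for illustration):

entry = Session.query(Entry).first()
entry.tag('python')                   # a single tag
entry.tag(['pylons', 'sqlalchemy'])   # or a list of tags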
Code example #3
File: __init__.py  Project: thakadu/Abraxas
def init_model(engine):
    """Call me before using any of the tables or classes in the model"""
    Session.configure(bind=engine)

    fetch_table = sa.Table('fetch', meta.metadata, autoload=True, autoload_with=engine)
    Fetch.table = fetch_table
    orm.mapper(Fetch, Fetch.table)

    feed_table = sa.Table('feed', meta.metadata, autoload=True, autoload_with=engine)
    Feed.table = feed_table
    orm.mapper(Feed, Feed.table)

    entry_table = sa.Table('entry', meta.metadata, autoload=True, autoload_with=engine)
    Entry.table = entry_table
    orm.mapper(Entry, Entry.table)

    tag_table = sa.Table('tag', meta.metadata, autoload=True, autoload_with=engine)
    Tag.table = tag_table
    orm.mapper(Tag, Tag.table)
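A minimal startup sketch, assuming an engine built from the project's sqlalchemy.url setting as in the scripts below (the SQLite URL here is only a placeholder):

from sqlalchemy import create_engine

engine = create_engine('sqlite:///abraxas.db')  # placeholder URL for illustration
init_model(engine)                              # binds the Session and maps Fetch, Feed, Entry and Tag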
Code example #4
File: tag.py  Project: thakadu/Abraxas
 def popular():
     """Returns the most popular recent tags"""
     log.info('*******popular method called in Tag class******')
     ntags = int(config.get('ntags', 20))
     hours = int(config.get('popular_tags_window', 72))
     s = text('''
         select lower, count(*) tagcount from tag
         where created > now() - interval :hours hour
         group by lower order by tagcount desc, lower 
         limit :limit
     ''')
     tags = Session.execute(s, dict(limit=ntags,hours=hours)).fetchall()
     return tags
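A usage sketch, assuming popular() is exposed as a static method on Tag (any decorator is outside this excerpt); each row carries the lower and tagcount columns selected above:

for row in Tag.popular():
    print row['lower'], row['tagcount']  # tag text and how often it was used recently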
Code example #5
File: entry.py  Project: thakadu/Abraxas
 def index(self, format='html', page=0, tag=None):
     """Render the most recent entries, newest first"""
     cols = entry_table.c
     query = select([
         cols.id,
         cols.title,
         cols.url,
         cols.tags,
         cols.pubtime,
         cols.host,
         cols.feed_title
     ])
     order_by = cols.pubtime.desc()
     query = query.limit(c.pagesize).offset(c.slicestart)
     query = query.order_by(order_by)
     c.links = Session.execute(query).fetchall()
     c.view = 'latest'
     return render('/links_960.mako')
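The query relies on c.pagesize and c.slicestart being set before the action runs; a minimal sketch of how they might be derived from the page argument (the page size of 20 is an assumption, not taken from the project):

c.pagesize = 20                        # assumed default page size
c.slicestart = int(page) * c.pagesize  # offset of the first row on the requested page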
Code example #6
File: entry.py  Project: thakadu/Abraxas
 def tag(self, keyword, format='html', page=0):
     """Render the entries that match the given tag keyword"""
     j = tag_table.join(entry_table, tag_table.c.entry_id==entry_table.c.id)
     cols = entry_table.c
     query = select([
         cols.id,
         cols.title,
         cols.url,
         cols.tags,
         cols.pubtime,
         cols.host,
         cols.feed_title
     ], from_obj=j)
     query = query.where(tag_table.c.lower==keyword.lower())
     query = query.limit(c.pagesize)
     query = query.offset(c.slicestart)
     query = query.order_by(cols.pubtime.desc())
     c.links = Session.execute(query).fetchall()
     c.view = 'tag/%s' % keyword
     return render('/links_960.mako')
Code example #7
File: fetch_feeds.py  Project: thakadu/Abraxas
def fetch(feed):
    """Fetches entries from a feed and stores them in the database"""
    d = feedparser.parse(feed.url)
    got_entries = len(d['entries'])
    fetch_ = Fetch()
    fetch_.feed_id = feed.id
    fetch_.result = str(got_entries)
    Session.add(fetch_)
    feed.last_fetched_at = datetime.datetime.now()
    Session.add(feed)
    count = 0
    for e in d['entries']:
        url = e.get('link')
        exists = Session.query(Entry).filter_by(url=url).first()
        if exists: 
            continue
        title = e.get('title')
        
        # Try to get a published time; this differs widely by feed.
        published = e.get('published_parsed')
        if not published:
            published = e.get('updated_parsed')
        if not published:
            published = e.get('created_parsed')
        if not published:
            # If all of the above failed, fall back to the current GMT time
            published = time.gmtime()
        
        # Now convert published to a datetime     
        published = datetime.datetime(*published[:6])

        summary = e.get('summary')
        
        # Now save the entry into the db...
        entry = Entry()
        entry.feed_id = feed.id
        entry.title = title
        entry.feed_title = feed.title
        entry.url = url
        entry.pubtime = published
        entry.summary = summary
        entry.host = get_host(feed.weburl)
        Session.add(entry)
        Session.commit()
        count += 1
        
    Session.commit()
    return count
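A sketch of driving fetch() by hand, assuming init_model() from code example #3 has already bound the Session to an engine:

for feed in Session.query(Feed):
    added = fetch(feed)
    print '%s: %d new entries' % (feed.title, added)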
Code example #8
File: fetch_feeds.py  Project: thakadu/Abraxas
                      help=help_,
                      action='store_true',
                      default=False)
    (options, args) = parser.parse_args()

    conf = appconfig('config:' + options.ini, relative_to='.')
    load_environment(conf.global_conf, conf.local_conf)

    engine = create_engine(conf['sqlalchemy.url'], echo=True)
    meta = MetaData()
    conn = engine.connect()

    feed_table = sa.Table('feed', meta, autoload=True, autoload_with=engine)
    query = select([feed_table])

    feeds = Session.query(Feed)
    for f in feeds:
        if f.last_fetched_at:
            ux_time_last_fetched = time.mktime(f.last_fetched_at.timetuple())
            print "last fetched: %s" % \
                (datetime.datetime.now() - f.last_fetched_at)
            seconds_ago = \
                unix_time(datetime.datetime.now()) - ux_time_last_fetched
        else:
            seconds_ago = 99999999
        print seconds_ago
        new = 0
        if seconds_ago < (throttle * 60) and not options.force:
            print "Not updating %s because inside throttle time." % f.title
        else:
            #try:
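This excerpt calls a unix_time() helper that is not shown; a plausible sketch, assuming it simply converts a datetime to seconds since the Unix epoch, mirroring the time.mktime() call above:

import time

def unix_time(dt):
    # seconds since the Unix epoch for a naive local datetime
    return time.mktime(dt.timetuple())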
Code example #9
File: tag_entries.py  Project: thakadu/Abraxas
    parser.add_option('--ini',
                      help='INI file to use for application settings',
                      type='str',
                      default='development.ini')
    help_ = 'Force a download of the feed even if it is within the throttle time.'
    parser.add_option('--force',
                      help=help_,
                      action='store_true',
                      default=False)
    (options, args) = parser.parse_args()

    conf = appconfig('config:' + options.ini, relative_to='.')
    load_environment(conf.global_conf, conf.local_conf)

    engine = create_engine(conf['sqlalchemy.url'], echo=True)
    meta = MetaData()
    conn = engine.connect()

    entries = Session.query(Entry).filter_by(tags=None)
    for e in entries:
        text = e.title
        tags = tag(text)
        print tags
        e.tags = ' '.join(tags)
        e.tag(tags)
        Session.add(e)
    Session.commit()
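The tag() keyword extractor applied to the title is not part of this excerpt; a naive stand-in, purely for illustration and not the project's actual extractor, could split the title into lowercase words:

def tag(text):
    # simplistic stand-in: one lowercase tag per word of the title
    return [word.lower() for word in text.split()]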