Example #1
def save(save_id=0):
    # mark a single item as saved
    db_session.query(Item).filter(Item.item_id == save_id).update(
        {Item.saved: True}, synchronize_session='fetch')
    db_session.commit()
    if save_id:
        return render_template('status.html', messages=['item(%s) saved' % save_id])
    flash('Successfully saved item')
    return redirect(request.referrer or '/')
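
Views like save() receive their arguments from Flask's URL routing, which these scraped snippets do not show. A minimal sketch of the wiring, where the app object and the URL rule are assumptions for illustration only:

    from flask import Flask
    app = Flask(__name__)

    # hypothetical route registration; potion's real URL rules are not part of these examples
    app.add_url_rule('/save/<int:save_id>', 'save', save)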
Example #2
def add(source_name, item_name, content, url=None, attributes=None):
    if not attributes:
        attributes = {}
    # reuse an existing source of this name, or create a command-line one
    source = db_session.query(Source).filter(
        Source.name == source_name).first() or Source(
            name=source_name,
            source_type='cmdline',
            address='/query/_%s' % source_name)
    item = Item(item_name, content, url=url, attributes=attributes)
    db_session.add(source)
    source.items.append(item)
    db_session.commit()
    return item
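
A short usage sketch for add(); the argument values here are invented. Calling it twice with the same source_name reuses the existing Source row instead of creating a duplicate:

    item = add('notes', 'todo', 'Buy milk', url='http://example.com/todo')
    print(item.item_id)  # Item exposes item_id as its key elsewhere in these examples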
Example #3
def add_source():
    form = SourceForm(request.form)
    if request.method == 'POST' and form.validate():
        s = Source(form.name.data, form.source_type.data, form.address.data)
        db_session.add(s)
        db_session.commit()
        flash('Source "%s" added' % form.name.data)
        return redirect(request.referrer or '/')
    return render_template('add_source.html', form=form)
Example #4
def archive():
    try:
        ids = [int(i) for i in request.form.get('ids', '').split(',')]
    except ValueError:
        flash('Bad params')
        return redirect(request.referrer or '/')
    db_session.query(Item).filter(Item.item_id.in_(ids)).update(
        {Item.archived: True}, synchronize_session='fetch')
    db_session.commit()
    flash('Successfully archived items: %d' % len(ids))
    return redirect(request.referrer or '/')
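
One way to exercise archive() by hand is Flask's test client; this assumes the app object from the earlier sketch and that the view accepts POSTs at /archive, neither of which these examples show:

    with app.test_client() as client:
        # archives items 1, 2 and 3; the ids are illustrative
        client.post('/archive', data={'ids': '1,2,3'})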
Example #5
File: feed.py Project: stef/potion
def parseFeed(feed):
    counter = 0
    f = parse(fetchFeed(feed.address),
              etag=feed.attributes.get('etag'),
              modified=feed.attributes.get('modified'))
    if not f:
        print('[EE] cannot parse %s - %s' % (feed.name, feed.address))
        return counter
    # remember the HTTP validators for the next conditional fetch
    try:
        feed.attributes['etag'] = f.etag
    except AttributeError:
        pass
    try:
        feed.attributes['modified'] = f.modified
    except AttributeError:
        pass
    d = feed.updated
    for item in reversed(f['entries']):
        try:
            u = urlSanitize(item['links'][0]['href'])
        except Exception:
            u = ''
        # skip items already stored for this source
        if db_session.query(Item).filter(Item.source_id == feed.source_id).filter(Item.url == u).first():
            continue

        try:
            tmp_date = datetime(*item['updated_parsed'][:6])
        except Exception:
            tmp_date = datetime.now()

        # prefer full content; fall back to summary-like fields
        try:
            c = ''.join(x.value for x in item.content)
        except Exception:
            c = '[EE] No content found, plz check the feed (%s) and fix me' % feed.name
            for key in ['media_text', 'summary', 'description', 'media:description']:
                if key in item:
                    c = item[key]
                    break

        t = item.get('title', '[EE] Notitle')

        feed.items.append(Item(t, c, url=u, attributes={'date': tmp_date}))
        db_session.commit()
        counter += 1
    # preserve the feed's original updated timestamp across the inserts
    feed.updated = d
    db_session.commit()
    return counter
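
parseFeed() operates on one Source row at a time. A plausible driver, assuming feed sources are stored with source_type 'feed' as the OPML import below does, might look like:

    for feed in db_session.query(Source).filter(Source.source_type == 'feed'):
        print('%s: %d new items' % (feed.name, parseFeed(feed)))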
Example #6
def sources():
    form = SourceForm(request.form)
    if request.method == 'POST' and form.validate():
        try:
            s = Source(form.name.data, form.source_type.data, form.address.data)
            db_session.add(s)
            db_session.commit()
        except Exception as e:
            flash('[!] Insertion error: %r' % e)
            db_session.rollback()
            return redirect('/sources')
        flash('Source "%s" added' % form.name.data)
        return redirect(request.referrer or '/')
    # assumed fallback for GET / failed validation, mirroring source_modify() below
    return render_template('sources.html', form=form, sources=Source.query.all())
Example #7
def source_modify(s_id=0):
    source = Source.query.get(s_id)
    form = SourceForm(obj=source)
    if request.method == 'POST' and form.validate():
        source.name = form.name.data
        source.source_type = form.source_type.data
        source.address = form.address.data
        db_session.add(source)
        db_session.commit()
        flash('Source "%s" modified' % form.name.data)
        return redirect('/sources')
    return render_template('sources.html',
                           form=form,
                           sources=Source.query.all(),
                           mode='modify',
                           menu_path='/sources')  # preserve menu highlight when paging
Example #8
def archive(id=0):
    if request.method == 'POST':
        try:
            ids = [int(i) for i in request.form.get('ids', '').split(',')]
        except ValueError:
            flash('Bad params')
            return redirect(request.referrer or '/')
    elif id == 0:
        flash('Nothing to archive')
        return redirect(request.referrer or '/')
    else:
        ids = [id]
    db_session.query(Item).filter(Item.item_id.in_(ids)).update(
        {Item.archived: True}, synchronize_session='fetch')
    db_session.commit()
    if id:
        return render_template('status.html', messages=['item(%s) archived' % id])
    flash('Successfully archived items: %d' % len(ids))
    return redirect(request.referrer or '/')
Example #9
def opml_import():
    url = request.args.get('url')
    if not url:
        return 'Missing url'
    import opml
    try:
        o = opml.parse(url)
    except Exception:
        return 'Cannot parse opml file %s' % url

    def import_outline_element(o):
        # outlines with an xmlUrl are feeds; anything else is a folder to recurse into
        for f in o:
            if hasattr(f, 'xmlUrl'):
                s = Source(f.title, 'feed', f.xmlUrl)
                db_session.add(s)
            else:
                import_outline_element(f)

    import_outline_element(o)
    db_session.commit()
    flash('import succeeded')
    return redirect(request.referrer or '/')
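
The recursion in import_outline_element() matters because OPML files can group feeds into folders, i.e. outline elements without an xmlUrl of their own. Given a nested document like this (the feed URL is invented for illustration):

    <outline text="news">
      <outline title="Example feed" xmlUrl="http://example.com/rss.xml"/>
    </outline>

the walk descends into "news" and adds a single Source for the inner feed element.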
Example #10
def del_source(s_id):
    Source.query.filter(Source.source_id == s_id).delete()
    db_session.commit()
    flash('Source removed')
    return redirect(request.referrer or '/')
Example #11
def parseFeed(feed):
    counter = 0
    f = parse(fetchFeed(feed.address), etag=feed.attributes.get("etag"), modified=feed.attributes.get("modified"))
    if not f:
        print("[EE] cannot parse %s - %s" % (feed.name, feed.address))
        return counter

    # skip the feed when the stored etag does not match the fetched one
    try:
        if feed.attributes["etag"] != f.etag:
            return counter
    except KeyError:
        pass

    # remember the HTTP validators for the next conditional fetch
    try:
        feed.attributes["etag"] = f.etag
    except AttributeError:
        pass

    try:
        feed.attributes["modified"] = f.modified
    except AttributeError:
        pass

    d = feed.updated
    for item in reversed(f["entries"]):
        if "links" in item:
            original_url = str(item["links"][0]["href"])
        else:
            original_url = str(item["link"])

        # skip items already stored for this source
        if (
            db_session.query(Item)
            .filter(Item.source_id == feed.source_id)
            .filter(Item.original_url == original_url)
            .first()
        ):
            continue

        try:
            u = urlSanitize(original_url)
        except Exception:
            u = ""

        try:
            tmp_date = datetime(*item["updated_parsed"][:6])
        except Exception:
            tmp_date = datetime.now()

        # prefer full content; fall back to summary-like fields
        try:
            c = "".join(x.value for x in item.content)
        except Exception:
            c = "[EE] No content found, plz check the feed (%s) and fix me" % feed.name
            for key in ["media_text", "summary", "description", "media:description"]:
                if key in item:
                    c = item[key]
                    break

        # fixing malformed html
        if c:
            original = c
            c = ""
            try:
                phtml = etree.parse(StringIO(original), etree.HTMLParser())
                for node in phtml.iter("*"):
                    clean_description(node)
                for node in phtml.xpath("//body/*"):
                    # encoding="unicode" makes lxml return str rather than bytes
                    c += etree.tostring(node, encoding="unicode")
            except Exception:
                print("[EE] description parsing error (%s - %s)" % (feed.name, u))
                c = original

        t = item.get("title", "[EE] Notitle")

        feed.items.append(Item(t, c, original_url, url=u, attributes={"date": tmp_date}))
        db_session.commit()
        counter += 1
    # preserve the feed's original updated timestamp across the inserts
    feed.updated = d
    db_session.commit()
    return counter
Example #12
def parseFeed(feed):
    counter = 0
    f = parse(fetchFeed(feed.address),
              etag=feed.attributes.get('etag'),
              modified=feed.attributes.get('modified'))
    if not f:
        print('[EE] cannot parse %s - %s' % (feed.name, feed.address))
        return counter

    # skip the feed when the stored etag does not match the fetched one
    try:
        if feed.attributes['etag'] != f.etag:
            return counter
    except KeyError:
        pass

    try:
        feed.attributes['etag'] = f.etag
    except AttributeError:
        pass

    try:
        feed.attributes['modified'] = f.modified
    except AttributeError:
        pass

    d = feed.updated
    for item in reversed(f['entries']):
        if 'links' in item:
            original_url = str(item['links'][0]['href'])
        else:
            original_url = str(item['link'])

        # skip items already stored for this source
        if db_session.query(Item). \
                filter(Item.source_id == feed.source_id). \
                filter(Item.original_url == original_url).first():
            continue

        try:
            u = urlSanitize(original_url)
        except Exception:
            u = ''

        try:
            tmp_date = datetime(*item['updated_parsed'][:6])
        except Exception:
            tmp_date = datetime.now()

        # prefer full content; fall back to summary-like fields
        try:
            c = ''.join(x.value for x in item.content)
        except Exception:
            c = '[EE] No content found, plz check the feed (%s) and fix me' % feed.name
            for key in ['media_text', 'summary', 'description', 'media:description']:
                if key in item:
                    c = item[key]
                    break

#        #fixing malformed html; disabled in this version, the previous example shows it enabled
#        if c:
#            original = c
#            c = ''
#            try:
#                phtml = etree.parse(StringIO(original), etree.HTMLParser())
#                for node in phtml.iter('*'):
#                    clean_description(node)
#                for node in phtml.xpath('//body/*'):
#                    c += etree.tostring(node)
#            except:
#                print u'[EE]description parsing error(%s - %s)' % (feed.name, u)
#                c = original

        t = item.get('title', '[EE] Notitle')

        feed.items.append(Item(t, c, original_url, url=u, attributes={'date': tmp_date}))
        db_session.commit()
        counter += 1
    # preserve the feed's original updated timestamp across the inserts
    feed.updated = d
    db_session.commit()
    return counter