Code example #1
from datetime import datetime, timedelta

from flask import flash, redirect  # assumed: flash/redirect suggest a Flask app

from news.models.fully_qualified_source import FullyQualifiedSource
from news.models.link import Link

# Note: redirect_back is a project helper; its import path is not shown in the original snippet.


def update_fqs(_, fqs_id):
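    """Fetch new articles for one FullyQualifiedSource, post the unseen ones, and schedule the next update."""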
    source = FullyQualifiedSource.by_id(fqs_id)

    try:
        articles = source.get_links()

        for article in articles:
            # skip if article already posted
            if Link.by_slug(article["slug"]) is not None:
                continue
            link = Link(
                title=article["title"],
                slug=article["slug"],
                text=article["text"],
                url=article["url"],
                feed_id=source.feed_id,
                user_id=12345,
            )
            link.commit()
        source.next_update = datetime.now() + timedelta(
            seconds=source.update_interval)
        source.save()
    except Exception:
        flash("Could not parse the RSS feed on URL {}".format(source.url),
              "error")

    return redirect(redirect_back(source.feed.route))
Code example #2
File: import_fqs.py Project: matoous/newz
def import_fqs():
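    """Batch job: poll every FullyQualifiedSource that is due for an update and post any new articles."""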
    from datetime import datetime, timedelta

    from news.models.link import Link
    from news.models.fully_qualified_source import FullyQualifiedSource

    print("Importing Fully Qualified Sources")
    while True:
        # Get batch of FQS
        now = datetime.now()
        sources = (FullyQualifiedSource
                   .where("next_update", "<", now)
                   .limit(BATCH_SIZE)  # BATCH_SIZE: module-level constant, not shown in this snippet
                   .get())

        # No FQS left to check
        if not sources:
            print("Finished")
            break

        # Check FQS
        for source in sources:
            print("Source {}".format(source.url))
            try:
                articles = source.get_links()
            except Exception as e:
                print("couldn't get links for FQS {}, error: {}".format(
                    source.url, e))
                articles = []
            for article in articles:
                # skip if article already posted
                if Link.by_slug(article["slug"]) is not None:
                    continue
                link = Link(
                    title=article["title"],
                    slug=article["slug"],
                    text=article["text"],
                    url=article["url"],
                    feed_id=source.feed_id,
                    user_id=AUTOPOSTER_ID,  # module-level constant for the autoposting account, defined elsewhere in the project
                )
                link.commit()
            source.next_update = now + timedelta(
                seconds=source.update_interval)
            source.save()
Code example #3
File: create_testing_data.py Project: matoous/newz
from slugify import slugify  # assumed source of slugify (e.g. python-slugify); not shown in the original snippet

from news.models.feed import Feed  # assumed path, mirroring news.models.link above
from news.models.link import Link


def create_stories():
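    """Seed the database with test feeds and stories read from news/scripts/stories.csv."""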
    with open("news/scripts/stories.csv", "r", encoding="utf8") as f:
        for line in f:  # iterate the file directly; readlines() is unnecessary
            try:
                # strip the trailing newline so it does not end up in the feed name
                url, title, text, feed = line.strip().split(";")
                # use a separate name for the Feed record so the open file handle `f` is not shadowed
                feed_obj = Feed.by_slug(slugify(feed))
                if feed_obj is None:
                    feed_obj = Feed(name=feed, slug=slugify(feed), description="")
                    feed_obj.commit()
                if Link.by_slug(slugify(title)) is None:
                    link = Link(
                        title=title,
                        slug=slugify(title),
                        url=url,
                        text=text,
                        user_id=12345,
                        feed_id=feed_obj.id,
                    )
                    link.commit()
            except Exception as e:
                print("Error on line:", line, e)