Example #1
def update_fqs(_, fqs_id):
    source = FullyQualifiedSource.by_id(fqs_id)

    try:
        articles = source.get_links()

        for article in articles:
            # skip if article already posted
            if Link.by_slug(article["slug"]) is not None:
                continue
            link = Link(
                title=article["title"],
                slug=article["slug"],
                text=article["text"],
                url=article["url"],
                feed_id=source.feed_id,
                user_id=12345,  # hardcoded autoposter account (cf. AUTOPOSTER_ID in Example #2)
            )
            link.commit()
        source.next_update = datetime.now() + timedelta(
            seconds=source.update_interval)
        source.save()
    except Exception:
        flash("Could not parse the RSS feed on URL {}".format(source.url),
              "error")

    return redirect(redirect_back(source.feed.route))
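
The flash and redirect calls imply a Flask app; redirect_back is a project helper outside this excerpt. For context, a minimal sketch of how such a handler could be wired to a route; the app object, URL rule, and endpoint name here are hypothetical, not taken from the project:

# Hypothetical wiring, assuming a Flask app object named `app`.
@app.route("/fqs/<int:fqs_id>/update", methods=["POST"])
def update_fqs_route(fqs_id):
    # the first parameter of update_fqs is unused, so pass a placeholder
    return update_fqs(None, fqs_id)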
Example #2
def import_fqs():
    from news.models.link import Link
    from news.models.fully_qualified_source import FullyQualifiedSource

    print("Importing Fully Qualified Sources")
    while True:
        # Get batch of FQS
        now = datetime.now()
        sources = (FullyQualifiedSource
                   .where("next_update", "<", now)
                   .limit(BATCH_SIZE)
                   .get())

        # No FQS left to check
        if not sources:
            print("Finished")
            break

        # Check FQS
        for source in sources:
            print("Source {}".format(source.url))
            try:
                articles = source.get_links()
            except Exception as e:
                print("couldn't get links for FQS {}, error: {}".format(
                    source.url, e))
                articles = []
            for article in articles:
                # skip if article already posted
                if Link.by_slug(article["slug"]) is not None:
                    continue
                link = Link(
                    title=article["title"],
                    slug=article["slug"],
                    text=article["text"],
                    url=article["url"],
                    feed_id=source.feed_id,
                    user_id=AUTOPOSTER_ID,
                )
                link.commit()
            source.next_update = now + timedelta(
                seconds=source.update_interval)
            source.save()
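
import_fqs references two module-level names, BATCH_SIZE and AUTOPOSTER_ID, that are outside the excerpt. A minimal sketch of the assumed setup; the batch size is a pure placeholder, while 12345 is borrowed from the user id hardcoded in Examples #1 and #3 for the same purpose:

BATCH_SIZE = 50        # assumed: number of sources fetched per round
AUTOPOSTER_ID = 12345  # assumed: matches the hardcoded id in Examples #1 and #3

if __name__ == "__main__":
    import_fqs()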
Example #3
def create_stories():
    with open("news/scripts/stories.csv", "r", encoding="utf8") as f:
        for line in f:
            try:
                url, title, text, feed = line.strip().split(";")
                # look up the target feed, creating it if it does not exist yet
                feed_obj = Feed.by_slug(slugify(feed))
                if feed_obj is None:
                    feed_obj = Feed(name=feed, slug=slugify(feed), description="")
                    feed_obj.commit()
                # skip stories whose slug is already taken
                if Link.by_slug(slugify(title)) is None:
                    link = Link(
                        title=title,
                        slug=slugify(title),
                        url=url,
                        text=text,
                        user_id=12345,
                        feed_id=feed_obj.id,
                    )
                    link.commit()
            except Exception as e:
                print("Error on line:", line, e)
Example #4
def importHN():
    import feedparser

    u = User.where("id", 1).first()
    f = Feed.where("slug", "testfeed").first()
    d = feedparser.parse("https://news.ycombinator.com/rss")
    # https://news.ycombinator.com/rss
    # https://news.nationalgeographic.com/news/misc/rss
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            text="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception:
            # entry could not be saved (e.g. its slug already exists); skip it
            pass
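
importHN leaves deduplication to ll.commit() raising on an existing entry. The same loop can dedupe explicitly with the Link.by_slug check used in Examples #1 through #3; a sketch of that variant:

    for entry in d["entries"]:
        # skip entries whose slug is already taken instead of
        # relying on commit() to fail
        if Link.by_slug(slugify(entry["title"])) is not None:
            continue
        Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            text="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        ).commit()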
Example #5
def create_default_feeds():
    u = User(username="******", email="*****@*****.**")
    u1 = User(username="******", email="*****@*****.**")
    u2 = User(username="******", email="*****@*****.**")
    u.set_password("lokiloki")
    u1.set_password("testtest")
    u2.set_password("testtest")
    try:
        u.save()
        u1.save()
        u2.save()
    except Exception:
        # the accounts already exist; reuse the first stored user
        u = User.where("id", 1).first()

    feeds = [
        Feed(
            name="Good long reads",
            description="Good long articles for you to waste time and learn something new.",
            slug=slugify("Good long reads"),
        ),
        Feed(
            name="The Awesome Earth", description="", slug=slugify("The Awesome Earth")
        ),
        Feed(name="Wildlife", description="", slug=slugify("Wildlife")),
        Feed(name="Python", description="", slug=slugify("Python")),
        Feed(name="Golang", description="", slug=slugify("Golang")),
        Feed(name="Hackernews", description="", slug=slugify("Hackernews")),
        Feed(name="Testfeed", description="The Testing Feed", slug=slugify("Testfeed")),
    ]
    for feed in feeds:
        try:
            feed.save()
        except Exception:
            # feed already exists; keep going
            pass

    f = Feed.where("slug", "hackernews").first()
    l = Link(
        title="Why Pi Matters",
        slug=slugify("Why Pi Matters"),
        text="Every March 14th, mathematicians like me are prodded out of our burrows like Punxsutawney Phil "
        "on Groundhog Day, blinking and bewildered by all the fuss. Yes, it’s Pi Day again. And not just "
        "any Pi Day. They’re calling this the Pi Day of the century: 3.14.15. Pi to five digits. A "
        "once-in-a-lifetime thing.",
        url="https://www.newyorker.com/tech/elements/pi-day-why-pi-matters",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l.commit()
    except Exception:
        # link already seeded; ignore duplicates
        pass
    l2 = Link(
        title="Reddit and the Struggle to Detoxify the Internet",
        slug=slugify("Reddit and the Struggle to Detoxify the Internet"),
        text="How do we fix life online without limiting free speech?",
        url="https://www.newyorker.com/magazine/2018/03/19/reddit-and-the-struggle-to-detoxify-the-internet",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l2.commit()
    except Exception:
        pass
    f = Feed.where("slug", "the-awesome-earth").first()
    l3 = Link(
        title="Is This the Underground Everest?",
        slug=slugify("Is This the Underground Everest?"),
        text="Far beneath a remote mountain range in Uzbekistan, explorers are delving into a labyrinth that could be the world's deepest cave.",
        url="https://www.nationalgeographic.com/magazine/2017/03/dark-star-deepest-cave-climbing-uzbekistan/",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l3.commit()
    except Exception:
        pass

    f = Feed.where("slug", "good-long-reads").first()
    l4 = Link(
        title="The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed",
        slug=slugify(
            "The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed"
        ),
        text="Andrew Weitz spruces up Hollywood’s reluctant Zoolanders.",
        url="https://www.newyorker.com/magazine/2018/03/19/the-man-whos-helped-elon-musk-tom-brady-and-ari-emanuel-get-dressed",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l4.commit()
    except Exception:
        pass

    f = Feed.where("slug", "testfeed").first()

    import feedparser

    d = feedparser.parse("https://news.ycombinator.com/rss")
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            summary="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception:
            pass
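
create_default_feeds repeats the same create/commit/ignore pattern for every seed link. A small helper (a sketch, not part of the original module) would collapse each of those try/except blocks to a single call:

def seed_link(**fields):
    # create a Link and ignore failures (e.g. the slug already exists),
    # mirroring the try/commit/except blocks above
    try:
        Link(**fields).commit()
    except Exception:
        pass

Each l.commit() block above would then become one seed_link(title=..., slug=..., ...) call with the same keyword arguments.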