def update_fqs(_, fqs_id):
    source = FullyQualifiedSource.by_id(fqs_id)
    try:
        articles = source.get_links()
        for article in articles:
            # skip if article already posted
            if Link.by_slug(article["slug"]) is not None:
                continue
            link = Link(
                title=article["title"],
                slug=article["slug"],
                text=article["text"],
                url=article["url"],
                feed_id=source.feed_id,
                user_id=12345,  # autoposter user
            )
            link.commit()
        source.next_update = datetime.now() + timedelta(
            seconds=source.update_interval)
        source.save()
    except Exception:
        # the original format string was missing its {} placeholder
        flash("Could not parse the RSS feed on URL {}".format(source.url),
              "error")
    return redirect(redirect_back(source.feed.route))
def create_tables():
    User.create_table()
    Feed.create_table()
    Link.create_table()
    Ban.create_table()
    Comment.create_table()
    FeedAdmin.create_table()
    Report.create_table()
    create_subscriptions_table()
    DisposableToken.create_table()
    CommentVote.create_table()
    LinkVote.create_table()
def create_tables(app):
    with app.app_context():
        from news.models.disposable_token import DisposableToken
        DisposableToken.create_table()
        from news.models.feed import Feed
        Feed.create_table()
        from news.models.user import User
        User.create_table()
        from news.models.link import Link
        Link.create_table()
        from news.models.vote import LinkVote
        LinkVote.create_table()
        from news.models.comment import Comment
        Comment.create_table()
        from news.models.vote import CommentVote
        CommentVote.create_table()
        from news.models.subscriptions import create_subscriptions_table
        create_subscriptions_table()
        from news.models.feed_admin import FeedAdmin
        FeedAdmin.create_table()
        from news.models.report import Report
        Report.create_table()
        from news.models.link import SavedLink
        SavedLink.create_table()
        from news.models.ban import Ban
        Ban.create_table()
def index():
    sort = None
    if current_user.is_authenticated:
        s = request.args.get("sort", "trending")
        if s == "trending":
            links = trending_links(current_user.subscribed_feed_ids)
            sort = "Trending"
        elif s == "new":
            links = new_links(current_user.subscribed_feed_ids)
            sort = "New"
        else:
            links = best_links(current_user.subscribed_feed_ids, "all")
            sort = "Best"
    else:
        links = trending_links(current_app.config["DEFAULT_FEEDS"])
    count = request.args.get("count", default=None, type=int)
    paginated_ids, has_less, has_more = paginate(links, 20)
    links = Link.by_ids(paginated_ids) if paginated_ids else []
    return render_template(
        "index.html",
        links=links,
        show_logo=True,
        less_links=has_less,
        more_links=has_more,
        title="eSource News",
        sort=sort,
        count=count,
    )
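# paginate() is used throughout this section but not defined in it. A minimal
# sketch of a compatible helper, assuming it slices a pre-sorted list of ids
# and reads the page offset from the request; the "page" parameter name and
# the exact return contract (ids for the current page, plus has_less/has_more
# flags) are assumptions:
def paginate(ids, per_page):
    page = request.args.get("page", default=0, type=int)
    start = page * per_page
    page_ids = ids[start:start + per_page]
    has_less = page > 0
    has_more = len(ids) > start + per_page
    return page_ids, has_less, has_more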
def get_feed(feed, sort=None):
    """
    Default feed page.

    Sort can be specified by the user; otherwise it falls back to the
    feed's default sort.

    :param feed: feed
    :param sort: sort
    :return:
    """
    # if (sort is None or sort not in ['trending', 'new', 'best']) and current_user.is_authenticated:
    #     sort = current_user.preferred_sort
    if sort is None:
        sort = feed.default_sort
    ids, has_less, has_more = paginate(
        LinkQuery(feed_id=feed.id, sort=sort).fetch_ids(), 20)
    links = Link.by_ids(ids) if len(ids) > 0 else []
    if sort == "new" and current_user.is_authenticated:
        # filter() returns a lazy iterator in Python 3; materialize it so the
        # template can treat feed.links like a list
        links = list(filter(min_score_filter(current_user.p_min_link_score),
                            links))
    feed.links = links
    return render_template("feed.html",
                           feed=feed,
                           less_links=has_less,
                           more_links=has_more,
                           sort=sort)
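# min_score_filter() is applied above but not defined in this section. A
# plausible sketch, assuming links expose ups/downs counters (as the
# "ups - downs" queries elsewhere in this code suggest) and the user
# preference is a minimum net score:
def min_score_filter(min_score):
    def predicate(link):
        return (link.ups - link.downs) >= min_score
    return predicate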
def to_python(self, value: str) -> Optional[Link]:
    if value == "":
        abort(404)
    link = Link.by_id(value)
    if link is None:
        abort(404)
    return link
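# to_python() above looks like the decoding half of a custom Werkzeug URL
# converter. A sketch of how such a converter is typically completed and
# registered; the class name "LinkConverter" and the "link" converter key
# are assumptions, not names confirmed by this code:
from werkzeug.routing import BaseConverter

class LinkConverter(BaseConverter):
    def to_python(self, value):
        ...  # the method shown above

    def to_url(self, value):
        return str(value.id)  # assumes routes address links by id

app.url_map.converters["link"] = LinkConverter  # app is the Flask app
# usage: @app.route("/l/<link:link>")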
def links(self):
    """
    User's posted links

    :return: links
    """
    from news.models.link import Link
    return Link.where("user_id", self.id).get()
def archive_links():
    old_links = (
        Link.where("archived", False)
        .where_raw("created_at < NOW() - INTERVAL '30 days'")
        .get()
    )
    for link in old_links:
        link.archive()
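# Link.archive() is invoked above but not shown in this section. A minimal
# sketch, assuming the "archived" boolean column from the query above and
# the save() persistence used by the other models here:
def archive(self):
    self.archived = True
    self.save()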
def link(self) -> "Link":
    """
    Return the link that was voted on

    :return: link
    """
    from news.models.link import Link
    if "link" not in self._relations:
        self._relations["link"] = Link.by_id(self.link_id)
    return self._relations["link"]
def link(self):
    """
    Get the link to which this comment belongs

    :return: parent Link of the Comment
    """
    from news.models.link import Link
    if "link" not in self._relations:
        self._relations["link"] = Link.by_id(self.link_id)
    return self._relations["link"]
def get_feed_rss(feed):
    """
    Returns the feed in RSS form

    :param feed: feed
    :return:
    """
    ids, _, _ = paginate(
        LinkQuery(feed_id=feed.id, sort="trending").fetch_ids(), 30)
    links = Link.by_ids(ids)
    return rss_page(feed, links)
def thing(self):
    if "thing" not in self._relations:
        if self.reportable_type == "comments":
            from news.models.comment import Comment
            self._relations["thing"] = Comment.by_id(self.reportable_id)
        elif self.reportable_type == "links":
            from news.models.link import Link
            self._relations["thing"] = Link.by_id(self.reportable_id)
    return self._relations["thing"]
def import_fqs():
    from news.models.link import Link
    from news.models.fully_qualified_source import FullyQualifiedSource

    print("Importing Fully Qualified Sources")
    while True:
        # get a batch of FQS that are due for an update
        now = datetime.now()
        sources = (FullyQualifiedSource
                   .where("next_update", "<", now)
                   .limit(BATCH_SIZE)
                   .get())

        # no FQS left to check
        if not sources:
            print("Finished")
            break

        # check each FQS in the batch
        for source in sources:
            print("Source {}".format(source.url))
            try:
                articles = source.get_links()
            except Exception as e:
                print("Couldn't get links for FQS {}, error: {}".format(
                    source.url, e))
                articles = []
            for article in articles:
                # skip if article already posted
                if Link.by_slug(article["slug"]) is not None:
                    continue
                link = Link(
                    title=article["title"],
                    slug=article["slug"],
                    text=article["text"],
                    url=article["url"],
                    feed_id=source.feed_id,
                    user_id=AUTOPOSTER_ID,
                )
                link.commit()
            source.next_update = now + timedelta(
                seconds=source.update_interval)
            source.save()
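# FullyQualifiedSource.get_links() is called by both update_fqs() and
# import_fqs() but not defined in this section. A plausible sketch using
# feedparser (already used elsewhere in this codebase); the field mapping
# is an assumption derived from the article dicts consumed above:
import feedparser

def get_links(self):
    parsed = feedparser.parse(self.url)
    return [
        {
            "title": entry["title"],
            "slug": slugify(entry["title"]),
            "text": entry.get("summary", ""),
            "url": entry["link"],
        }
        for entry in parsed["entries"]
    ]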
def create_stories():
    with open("news/scripts/stories.csv", "r", encoding="utf8") as f:
        for line in f.readlines():
            try:
                url, title, text, feed = line.split(";")
                # don't reuse the file handle's name (f) for the feed lookup
                feed_obj = Feed.by_slug(slugify(feed))
                if feed_obj is None:
                    feed_obj = Feed(name=feed, slug=slugify(feed),
                                    description="")
                    feed_obj.commit()
                if Link.by_slug(slugify(title)) is None:
                    link = Link(
                        title=title,
                        slug=slugify(title),
                        url=url,
                        text=text,
                        user_id=12345,  # autoposter user
                        feed_id=feed_obj.id,
                    )
                    link.commit()
            except Exception as e:
                print("Error on line:", line, e)
def importHN():
    import feedparser

    u = User.where("id", 1).first()
    f = Feed.where("slug", "testfeed").first()
    d = feedparser.parse("https://news.ycombinator.com/rss")
    # https://news.ycombinator.com/rss
    # https://news.nationalgeographic.com/news/misc/rss
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            text="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception:
            # duplicate slugs are expected on re-import; skip them
            pass
def new():
    links = new_links(current_app.config["DEFAULT_FEEDS"])
    paginated_ids, has_less, has_more = paginate(links, 20)
    links = Link.by_ids(paginated_ids) if paginated_ids else []
    return render_template(
        "index.html",
        links=links,
        less_links=has_less,
        more_links=has_more,
        title="eSource News - New",
    )
def remove_link(feed, link_id):
    """
    Removes a link from the given feed.

    This is a hard delete, so be careful.

    :param feed: feed
    :param link_id: link id
    :return:
    """
    link = Link.by_id(link_id)
    if link is None:
        abort(404)
    link.delete()
    return redirect(redirect_back(feed.route))
def _rebuild(self):
    """
    Rebuild the link query from the database
    """
    from news.models.link import Link
    q = (Link.where("feed_id", self.feed_id)
         .order_by_raw(sorts[self.sort])
         .limit(1000))
    # the cache needs an array of tuples, not an Orator collection
    res = [self._tupler(l) for l in q.get()]
    self._data = sort_tuples(res)
    self._fetched = True
    self._save()
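# The sorts mapping indexed by _rebuild() is not shown in this section. A
# sketch of what the raw ORDER BY fragments could look like; only
# "ups - downs" appears verbatim elsewhere in this code, the other two
# expressions are assumptions:
sorts = {
    "trending": "created_at DESC, ups - downs DESC",
    "new": "created_at DESC",
    "best": "ups - downs DESC",
}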
def best():
    time = request.args.get("time")
    links = best_links(current_app.config["DEFAULT_FEEDS"],
                       time_limit=time or "all")
    paginated_ids, has_less, has_more = paginate(links, 20)
    links = Link.by_ids(paginated_ids) if paginated_ids else []
    return render_template(
        "index.html",
        links=links,
        less_links=has_less,
        more_links=has_more,
        title="eSource News - Best",
    )
def index_rss():
    if current_user.is_authenticated:
        links = trending_links(current_user.subscribed_feed_ids)
    else:
        links = trending_links(current_app.config["DEFAULT_FEEDS"])
    paginated_ids, _, _ = paginate(links, 30)
    links = Link.by_ids(paginated_ids)
    # TODO maybe do this through a fake feed (that's what reddit does and it actually makes sense)
    fg = FeedGenerator()
    fg.id("https://localhost:5000/")
    fg.title("Newsfeed")
    fg.link(href="http://localhost:5000/", rel="self")
    fg.description("Global news aggregator!")
    fg.language("en")
    for entry in rss_entries(links):
        fg.add_entry(entry)
    return fg.rss_str(pretty=True)
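# rss_entries() is consumed above but not defined in this section. A minimal
# sketch producing feedgen FeedEntry objects, which FeedGenerator.add_entry()
# accepts; which Link fields map to which RSS fields is an assumption:
from feedgen.entry import FeedEntry

def rss_entries(links):
    for link in links:
        fe = FeedEntry()
        fe.id(link.url)
        fe.title(link.title)
        fe.link(href=link.url)
        fe.description(link.text or link.title)
        yield fe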
def users_profile(username):
    user = User.by_username(username)
    if user is None:
        abort(404)
    if user.id == 12345:  # autoposter user
        return render_template("autoposter_profile.html", user=user)
    # fetch one extra item to detect whether there is more to show
    links = (Link.where("user_id", user.id)
             .order_by_raw("ups - downs DESC")
             .limit(9)
             .get())
    comments = (Comment.where("user_id", user.id)
                .order_by_raw("ups - downs DESC")
                .limit(6)
                .get())
    administrations = FeedAdmin.by_user_id(user.id)
    return render_template(
        "profile.html",
        user=user,
        links=links[:8],  # slicing already clamps to the list length
        has_more_links=len(links) > 8,
        comments=comments[:5],
        has_more_comments=len(comments) > 5,
        administrations=administrations,
        active_section="about",
    )
def create_default_feeds():
    u = User(username="******", email="*****@*****.**")
    u1 = User(username="******", email="*****@*****.**")
    u2 = User(username="******", email="*****@*****.**")
    u.set_password("lokiloki")
    u1.set_password("testtest")
    u2.set_password("testtest")
    try:
        u.save()
        u1.save()
        u2.save()
    except Exception:
        # users already exist; fall back to the first user
        u = User.where("id", 1).first()

    feeds = [
        Feed(
            name="Good long reads",
            description="Good long articles for you to waste time and learn something new.",
            slug=slugify("Good long reads"),
        ),
        Feed(name="The Awesome Earth", description="",
             slug=slugify("The Awesome Earth")),
        Feed(name="Wildlife", description="", slug=slugify("Wildlife")),
        Feed(name="Python", description="", slug=slugify("Python")),
        Feed(name="Golang", description="", slug=slugify("Golang")),
        Feed(name="Hackernews", description="", slug=slugify("Hackernews")),
        Feed(name="Testfeed", description="The Testing Feed",
             slug=slugify("Testfeed")),
    ]
    for feed in feeds:
        try:
            feed.save()
        except Exception:
            pass

    f = Feed.where("slug", "hackernews").first()
    link1 = Link(
        title="Why Pi Matters",
        slug=slugify("Why Pi Matters"),
        text="Every March 14th, mathematicians like me are prodded out of our burrows like Punxsutawney Phil "
             "on Groundhog Day, blinking and bewildered by all the fuss. Yes, it’s Pi Day again. And not just "
             "any Pi Day. They’re calling this the Pi Day of the century: 3.14.15. Pi to five digits. A "
             "once-in-a-lifetime thing.",
        url="https://www.newyorker.com/tech/elements/pi-day-why-pi-matters",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        link1.commit()
    except Exception:
        pass

    link2 = Link(
        title="Reddit and the Struggle to Detoxify the Internet",
        slug=slugify("Reddit and the Struggle to Detoxify the Internet"),
        text="How do we fix life online without limiting free speech?",
        url="https://www.newyorker.com/magazine/2018/03/19/reddit-and-the-struggle-to-detoxify-the-internet",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        link2.commit()
    except Exception:
        pass

    f = Feed.where("slug", "the-awesome-earth").first()
    link3 = Link(
        title="Is This the Underground Everest?",
        slug=slugify("Is This the Underground Everest?"),
        text="Far beneath a remote mountain range in Uzbekistan, explorers are delving into a labyrinth that could be the world's deepest cave.",
        url="https://www.nationalgeographic.com/magazine/2017/03/dark-star-deepest-cave-climbing-uzbekistan/",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        link3.commit()
    except Exception:
        pass

    f = Feed.where("slug", "good-long-reads").first()
    link4 = Link(
        title="The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed",
        slug=slugify(
            "The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed"
        ),
        text="Andrew Weitz spruces up Hollywood’s reluctant Zoolanders.",
        url="https://www.newyorker.com/magazine/2018/03/19/the-man-whos-helped-elon-musk-tom-brady-and-ari-emanuel-get-dressed",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        link4.commit()
    except Exception:
        pass

    f = Feed.where("slug", "testfeed").first()
    import feedparser
    d = feedparser.parse("https://news.ycombinator.com/rss")
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            # the Link constructor takes text=, not summary=, everywhere else
            # in this code, so the original summary="" kwarg was a bug
            slug=slugify(entry["title"]),
            text="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception:
            pass
def links_query(self, sort: str = "trending") -> List[Link]:
    # [Link] is not a valid annotation; List comes from typing
    return Link.by_feed(self, sort)
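# Link.by_feed() is not shown in this section. A sketch consistent with the
# LinkQuery cache used elsewhere here; treat it as an assumption rather than
# the confirmed implementation:
@classmethod
def by_feed(cls, feed, sort):
    ids = LinkQuery(feed_id=feed.id, sort=sort).fetch_ids()
    return cls.by_ids(ids) if ids else []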