def create_tables():
    User.create_table()
    Feed.create_table()
    Link.create_table()
    Ban.create_table()
    Comment.create_table()
    FeedAdmin.create_table()
    Report.create_table()
    create_subscriptions_table()
    DisposableToken.create_table()
    CommentVote.create_table()
    LinkVote.create_table()
def create_tables(app):
    with app.app_context():
        from news.models.disposable_token import DisposableToken

        DisposableToken.create_table()

        from news.models.feed import Feed

        Feed.create_table()

        from news.models.user import User

        User.create_table()

        from news.models.link import Link

        Link.create_table()

        from news.models.vote import LinkVote

        LinkVote.create_table()

        from news.models.comment import Comment

        Comment.create_table()

        from news.models.vote import CommentVote

        CommentVote.create_table()

        from news.models.subscriptions import create_subscriptions_table

        create_subscriptions_table()

        from news.models.feed_admin import FeedAdmin

        FeedAdmin.create_table()

        from news.models.report import Report

        Report.create_table()

        from news.models.link import SavedLink

        SavedLink.create_table()

        from news.models.ban import Ban

        Ban.create_table()
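A minimal sketch of one way the create_tables(app) variant above could be invoked; the original does not show its call site, so the Flask CLI command name "init-db" and the app object here are assumptions for illustration only.

import click
from flask import Flask

app = Flask(__name__)


@app.cli.command("init-db")
def init_db_command():
    """Build the full schema via `flask init-db` (hypothetical wiring)."""
    create_tables(app)
    click.echo("Tables created.")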
def to_python(self, value: str) -> Optional[Feed]:
    if value == "":
        abort(404)
    feed = Feed.by_slug(value)
    if feed is None:
        abort(404)
    return feed
def to_python(self, value):
    from news.models.feed import Feed

    feeds = []
    for feed_id in value.split("+"):
        feed = Feed.by_id(feed_id)
        if feed is not None:
            feeds.append(feed)
    return feeds
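The two to_python methods above read like Werkzeug URL converters. A minimal sketch of how such a converter is usually registered with Flask follows; the class name FeedConverter, the "/f/<feed:feed>" route, and the view function are assumptions for illustration, not part of the original code.

from flask import Flask, abort
from werkzeug.routing import BaseConverter

from news.models.feed import Feed


class FeedConverter(BaseConverter):
    """Resolve a feed slug in the URL to a Feed instance (hypothetical name)."""

    def to_python(self, value: str) -> "Feed":
        feed = Feed.by_slug(value)
        if feed is None:
            abort(404)
        return feed

    def to_url(self, value: "Feed") -> str:
        # build URLs from a Feed object using its slug
        return value.slug


app = Flask(__name__)
app.url_map.converters["feed"] = FeedConverter


@app.route("/f/<feed:feed>")
def show_feed(feed):
    return feed.name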
def admin():
    if not current_user.is_authenticated:
        return redirect("/login")
    if not current_user.is_god:
        return redirect("/")
    all_feeds = Feed.get()
    return render_template("admin.html", all_feeds=all_feeds)
def feed(self) -> "Feed": """ Feed where the lin was posted :return: feed """ from news.models.feed import Feed if "feed" not in self._relations: self._relations["feed"] = Feed.by_id(self.feed_id) return self._relations["feed"]
def feed(self) -> "Feed": """ Get feed for which the ban applies :return: feed """ from news.models.feed import Feed if "feed" not in self._relations: self._relations["feed"] = Feed.by_id(self.feed_id) return self._relations["feed"]
def feed(self):
    """
    Return feed to which this FQS belongs.
    Caches the result in FQS relations.

    :return: Feed
    """
    from news.models.feed import Feed

    if "feed" not in self._relations:
        self._relations["feed"] = Feed.by_id(self.feed_id)
    return self._relations["feed"]
def create_stories():
    with open("news/scripts/stories.csv", "r", encoding="utf8") as f:
        for line in f.readlines():
            try:
                url, title, text, feed = line.split(";")
                f = Feed.by_slug(slugify(feed))
                if f is None:
                    f = Feed(name=feed, slug=slugify(feed), description="")
                    f.commit()
                if Link.by_slug(slugify(title)) is None:
                    l = Link(
                        title=title,
                        slug=slugify(title),
                        url=url,
                        text=text,
                        user_id=12345,
                        feed_id=f.id,
                    )
                    l.commit()
            except Exception as e:
                print("Error on line:", line, e)
def apply(self, user=None, feed=None):
    if user is None:
        from news.models.user import User

        user = User.by_id(self.user_id)
    if feed is None:
        from news.models.feed import Feed

        feed = Feed.by_id(self.feed_id)

    # can't ban admin
    if user.is_feed_admin(feed):
        return False

    user.unsubscribe(feed)
    self.save()
    self.write_to_cache()
def importHN():
    import feedparser

    u = User.where("id", 1).first()
    f = Feed.where("slug", "testfeed").first()
    d = feedparser.parse("https://news.ycombinator.com/rss")
    # https://news.ycombinator.com/rss
    # https://news.nationalgeographic.com/news/misc/rss
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            text="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception as e:
            pass
def feed(self):
    from news.models.feed import Feed

    if "feed" not in self._relations:
        self._relations["feed"] = Feed.by_id(self.feed_id)
    return self._relations["feed"]
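The same "look up Feed.by_id(self.feed_id) once and memoize it in self._relations" property appears on several models above. A sketch of factoring it into a reusable helper follows; cached_relation and DummyComment are hypothetical names used only to illustrate the pattern, not code from the original project.

from news.models.feed import Feed


def cached_relation(key, loader):
    """Return a property that caches loader(self) under self._relations[key]."""

    def getter(self):
        if key not in self._relations:
            self._relations[key] = loader(self)
        return self._relations[key]

    return property(getter)


class DummyComment:
    """Stand-in model showing how the helper would be used."""

    def __init__(self, feed_id):
        self.feed_id = feed_id
        self._relations = {}

    # equivalent to the hand-written feed properties above
    feed = cached_relation("feed", lambda self: Feed.by_id(self.feed_id))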
def subscribed_feeds(self) -> List["Feed"]:
    from news.models.feed import Feed

    return [Feed.by_id(x) for x in self.subscribed_feed_ids]
def create_default_feeds():
    u = User(username="******", email="*****@*****.**")
    u1 = User(username="******", email="*****@*****.**")
    u2 = User(username="******", email="*****@*****.**")
    u.set_password("lokiloki")
    u1.set_password("testtest")
    u2.set_password("testtest")
    try:
        u.save()
        u1.save()
        u2.save()
    except:
        u = User.where("id", 1).first()

    feeds = [
        Feed(
            name="Good long reads",
            description="Good long articles for you to waste time and learn something new.",
            slug=slugify("Good long reads"),
        ),
        Feed(
            name="The Awesome Earth", description="", slug=slugify("The Awesome Earth")
        ),
        Feed(name="Wildlife", description="", slug=slugify("Wildlife")),
        Feed(name="Python", description="", slug=slugify("Python")),
        Feed(name="Golang", description="", slug=slugify("Golang")),
        Feed(name="Hackernews", description="", slug=slugify("Hackernews")),
        Feed(name="Testfeed", description="The Testing Feed", slug=slugify("Testfeed")),
    ]
    for feed in feeds:
        try:
            feed.save()
        except:
            pass

    f = Feed.where("slug", "hackernews").first()
    l = Link(
        title="Why Pi Matters",
        slug=slugify("Why Pi Matters"),
        text="Every March 14th, mathematicians like me are prodded out of our burrows like Punxsutawney Phil "
        "on Groundhog Day, blinking and bewildered by all the fuss. Yes, it’s Pi Day again. And not just "
        "any Pi Day. They’re calling this the Pi Day of the century: 3.14.15. Pi to five digits. A "
        "once-in-a-lifetime thing.",
        url="https://www.newyorker.com/tech/elements/pi-day-why-pi-matters",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l.commit()
    except:
        pass

    l2 = Link(
        title="Reddit and the Struggle to Detoxify the Internet",
        slug=slugify("Reddit and the Struggle to Detoxify the Internet"),
        text="How do we fix life online without limiting free speech?",
        url="https://www.newyorker.com/magazine/2018/03/19/reddit-and-the-struggle-to-detoxify-the-internet",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l2.commit()
    except:
        pass

    f = Feed.where("slug", "the-awesome-earth").first()
    l3 = Link(
        title="Is This the Underground Everest?",
        slug=slugify("Is This the Underground Everest?"),
        text="Far beneath a remote mountain range in Uzbekistan, explorers are delving into a labyrinth that could be the world's deepest cave.",
        url="https://www.nationalgeographic.com/magazine/2017/03/dark-star-deepest-cave-climbing-uzbekistan/",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l3.commit()
    except:
        pass

    f = Feed.where("slug", "good-long-reads").first()
    l4 = Link(
        title="The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed",
        slug=slugify(
            "The Man Who’s Helped Elon Musk, Tom Brady, and Ari Emanuel Get Dressed"
        ),
        text="Andrew Weitz spruces up Hollywood’s reluctant Zoolanders.",
        url="https://www.newyorker.com/magazine/2018/03/19/the-man-whos-helped-elon-musk-tom-brady-and-ari-emanuel-get-dressed",
        feed_id=f.id,
        user_id=u.id,
    )
    try:
        l4.commit()
    except:
        pass

    f = Feed.where("slug", "testfeed").first()
    import feedparser

    d = feedparser.parse("https://news.ycombinator.com/rss")
    for entry in d["entries"]:
        ll = Link(
            title=entry["title"],
            slug=slugify(entry["title"]),
            summary="",
            url=entry["link"],
            feed_id=f.id,
            user_id=u.id,
        )
        try:
            ll.commit()
        except Exception as e:
            pass