def monitor(request): if request.method == "POST": key = request.form.get("keyword") if not key or len(key) < 5 or len(key) > 20: raise NotFound() keyword = session.query(Keyword).filter(Keyword.keyword.ilike(key)).first() if not keyword: chars = "1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM" hash = "".join([random.choice(chars) for c in range(5)]) keyword = Keyword(key, hash) session.commit() if request.logged_in: user = session.query(User).filter(User.name == request.user).first() monitored = ( session.query(Monitoring) .filter(Monitoring.user_uid == user.uid) .filter(Monitoring.keyword_uid == keyword.uid) .first() ) if not monitored: m = Monitoring(user.uid, keyword.uid) session.commit() return redirect(url_for("monitor", hash=keyword.hash, slug=keyword.keyword)) cachekey = _hash("views_content_monitor") cached = cache.get(cachekey) if cached: return cached keys = ["bozarking", "reddit search", "jailbait"] keywords = session.query(Keyword).filter(Keyword.keyword.in_(keys)).all() response = serve_response("monitor.html", keywords=keywords) cache.set(cachekey, response, 10 * 60) return response
def remove_monitoring(request, hash):
    user = session.query(User).filter(User.name == request.user).first()
    keyword = session.query(Keyword).filter(Keyword.hash == hash).first()
    if not user or not keyword:
        raise Forbidden()
    session.query(Monitoring)\
        .filter(Monitoring.user_uid == user.uid)\
        .filter(Monitoring.keyword_uid == keyword.uid)\
        .delete(synchronize_session=False)
    session.commit()
    return serve_json('')
def autocomplete(request, name):
    query = request.args.get('q')
    if not query:
        raise NotFound()
    if name == 'tags':
        res = session.query(Tag).filter(Tag.name.ilike(query + '%')).limit(10)
        suggestions = '\n'.join(r.name for r in res)
    elif name == 'reddits':
        res = session.query(Subreddit)\
            .filter(Subreddit.url.ilike(query + '%'))\
            .order_by(Subreddit.subscribers.desc()).limit(10)
        suggestions = '\n'.join(r.url for r in res)
    else:
        # Unknown autocomplete source; previously this fell through and
        # raised a NameError on `suggestions`.
        raise NotFound()
    return serve_text(suggestions)
def index(request):
    subreddits = session.query(Subreddit)\
        .filter(Subreddit.subscribers > 100)\
        .filter(Subreddit.fp_submissions == 50)\
        .filter(Subreddit.all_age_latest < 31)\
        .filter(Subreddit.over18 == False)\
        .order_by(sql_random()).limit(30).all()
    logos = session.query(Subreddit)\
        .filter(Subreddit.logo == True)\
        .filter(Subreddit.all_age_latest < 90)\
        .filter(Subreddit.over18 == False)\
        .filter(Subreddit.subscribers > 100)\
        .order_by(sql_random()).limit(30).all()
    return serve_response('index.html', subreddits=subreddits, logos=logos)
def get(self):
    http = httplib2.Http()
    headers = self._login()
    print headers
    images = SoupStrainer('img')
    subreddits = session.query(Subreddit)\
        .filter(Subreddit.logo == None)\
        .order_by(Subreddit.subscribers.desc()).all()
    for subreddit in subreddits:
        url = 'http://www.reddit.com/r/%s' % subreddit.url
        response, content = http.request(url, headers=headers)
        status = int(response['status'])
        if status >= 500:
            # Server trouble: back off exponentially and retry later.
            self.delay.more_exp()
            print status, subreddit.url
        elif status >= 400:
            # Subreddit gone or private; remember that there is no logo.
            subreddit.logo = False
            session.commit()
        else:
            self.delay.less()
            soup = BeautifulSoup(content, parseOnlyThese=images)
            img_link = soup.findAll(id='header-img')[0]['src']
            if img_link == 'http://static.reddit.com/reddit.com.header.png':
                # The default reddit header is not a custom logo.
                subreddit.logo = False
            else:
                try:
                    resp, img = http.request(img_link)
                    # Binary mode for image data.
                    f = open(paths.logos + '/' + subreddit.url + '.png', 'wb')
                    f.write(img)
                    f.close()
                    subreddit.logo = True
                except Exception:
                    print 'Saving image failed for %s.' % subreddit.url
            session.commit()
        self.delay.sleep()
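# The scraper above throttles itself through `self.delay`, which only needs
# three methods: more_exp() (back off exponentially after errors), less()
# (recover after a success) and sleep(). A minimal sketch; the class name,
# bounds and factors are assumptions, not the site's real implementation:
import time

class Delay(object):
    def __init__(self, initial=2.0, minimum=2.0, maximum=300.0):
        self.seconds = initial
        self.minimum = minimum
        self.maximum = maximum

    def more_exp(self):
        # Double the wait after a server error, up to a ceiling.
        self.seconds = min(self.seconds * 2, self.maximum)

    def less(self):
        # Ease back toward the floor after a successful request.
        self.seconds = max(self.seconds / 2, self.minimum)

    def sleep(self):
        time.sleep(self.seconds)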
def subreddit(request, name):
    names = name.split('+')
    subreddits = session.query(Subreddit).filter(
        Subreddit.url.in_(names)).order_by(Subreddit.url).all()
    if not subreddits:
        raise NotFound()
    return serve_response('subreddit.html', subreddits=subreddits)
def analyse_one(self, name):
    reddit = session.query(Subreddit).filter(Subreddit.url.ilike(name)).first()
    if not reddit:
        # Unknown subreddit name; previously this raised an AttributeError.
        return
    new = self._analyse(reddit.url)
    if not new:
        return
    self._set_new(reddit, new)
def cachedproxy(request, name):
    cached = cache.get('submissions_' + name)
    if cached:
        return cached
    subreddit = session.query(Subreddit).filter(
        Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound()
    http = httplib2.Http()
    uri = 'http://www.reddit.com/r/%s/.json?limit=5' % name
    response, content = http.request(uri, 'GET')
    if response['status'] == '200':
        out = json.loads(content)
        if out:
            out = out['data']['children']
        if not out:
            r = serve_json("there doesn't seem to be anything here.")
        else:
            # Remember how many days ago the newest submission was made.
            subreddit.all_age_latest = unix_days_ago(
                out[0]['data']['created_utc'])
            session.commit()
            r = serve_response('api/submissions.html', submissions=out)
        cache.set('submissions_' + name, r, timeout=60 * 60)
    else:
        r = serve_json('fetching submissions failed.')
    return r
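# cachedproxy above stores "days since the newest submission" through
# unix_days_ago, and reddits further down filters with days_ago. Neither
# helper is part of this dump; these are minimal sketches consistent with how
# the call sites use them (the exact signatures and rounding are assumptions):
from datetime import datetime, timedelta
import time

def unix_days_ago(timestamp):
    # Whole days elapsed since a unix timestamp.
    return int((time.time() - timestamp) // 86400)

def days_ago(days):
    # The datetime `days` days in the past, for created/updated comparisons.
    return datetime.utcnow() - timedelta(days=days)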
def vote(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound()
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden()
    return exec_todo(request, user, todo)
def analyse_new(self):
    reddits = session.query(Subreddit).filter(Subreddit.updated == None)\
        .order_by(Subreddit.uid.asc()).all()
    self.logger.info('Analysing %s new reddits.' % len(reddits))
    for reddit in reddits:
        new = self._analyse(reddit.url)
        if not new:
            continue
        self._set_new(reddit, new)
def monitor_detail(request, hash, slug=None): if slug is not None and slug[-4:] == ".rss": template = "monitor-detail.xml" slug = slug[:-4] else: template = "monitor-detail.html" keyword = session.query(Keyword).filter(Keyword.hash == hash).first() if keyword is None: raise NotFound() return serve_response(template, keyword=keyword)
def fix_relative_links():
    import re
    from myapp.env import session
    from myapp.models import Subreddit
    make_app()
    # Descriptions where a markdown link's paren was escaped: ']\(' -> ']('.
    reddits = session.query(Subreddit)\
        .filter(Subreddit.description.ilike(r'%]\(%')).all()
    for r in reddits:
        print r.url
        r.description = re.sub(r'\]\\\(', '](', r.description)
    session.commit()
def add_tag(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound()
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden()
    name = '_'.join(todo.split('_')[2:])
    if len(name) > 20 or re.search(disallowed_chars, name):
        raise Forbidden()
    return exec_todo(request, user, todo)
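# vote and add_tag both check an anti-CSRF token bound to the id embedded in
# the todo string. is_valid_token is not part of this dump; one plausible
# shape is an HMAC over the id with a server-side secret. The secret, scheme
# and helper name `make_token` below are assumptions, not the site's actual
# implementation:
import hmac
import hashlib

SECRET = 'server-side-secret'  # hypothetical; would come from config

def make_token(id):
    return hmac.new(SECRET, str(id), hashlib.sha1).hexdigest()

def is_valid_token(id, token):
    # Compare the token the form submitted against the one we would issue.
    return make_token(id) == token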
def analyse_all(self):
    empty = {'ups': None, 'downs': None, 'comments': None, 'media': None,
             'submissions': None, 'selfposts': None, 'oldest': 0, 'latest': 0}
    reddits = session.query(Subreddit).order_by(Subreddit.uid.asc()).all()
    self.logger.info('Analysing all (%s) reddits.' % len(reddits))
    for reddit in reddits:
        new = self._analyse(reddit.url)
        if new:
            self._set_new(reddit, new)
        else:
            self._set_new(reddit, empty)
def exec_todo(request, user, todo):
    # A todo string is '<type>_<id>_<action>', e.g. 'vote_12_up' or
    # 'tag_12_some_tag' (the action may itself contain underscores).
    r = todo.split('_')
    type = r[0]
    id = r[1]
    action = '_'.join(r[2:])
    if type == 'vote':
        subreddit_tag = session.query(SubredditTag).filter(
            SubredditTag.uid == id).first()
        if not subreddit_tag:
            raise NotFound()
        vote = session.query(Vote).filter(Vote.subreddit_tag_id == id)\
            .filter(Vote.user_id == user.uid).first()
        if not vote:
            vote = Vote(subreddit_tag.uid, user.uid)
        if action == 'up':
            vote.up()
        elif action == 'down':
            vote.down()
        else:
            raise NotFound()
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    elif type == 'tag':
        subreddit = session.query(Subreddit).filter_by(id=id).first()
        if not subreddit:
            raise NotFound()
        tag = session.query(Tag).filter_by(name=action).first()
        if not tag:
            tag = Tag()
            tag.user_id = user.uid
            tag.name = action
            session.commit()
        # See if this reddit is tagged already.
        subreddit_tag = session.query(SubredditTag)\
            .filter(SubredditTag.tag_id == tag.uid)\
            .filter(SubredditTag.subreddit_id == subreddit.uid).first()
        if subreddit_tag:
            # Already tagged: treat the request as an upvote on that tag.
            vote = session.query(Vote)\
                .filter(Vote.subreddit_tag_id == subreddit_tag.uid)\
                .filter(Vote.user_id == user.uid).first()
            if not vote:
                vote = Vote(subreddit_tag.uid, user.uid)
            vote.up()
            session.commit()
            return serve_response('api/bigtag.html', tag=subreddit_tag)
        subreddit_tag = SubredditTag()
        subreddit_tag.tag_id = tag.uid
        subreddit_tag.subreddit_id = subreddit.uid
        subreddit_tag.user_id = user.uid
        subreddit.tags.append(subreddit_tag)
        session.commit()
        vote = Vote(subreddit_tag.uid, user.uid)
        vote.up()
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    else:
        raise NotFound()
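# Example of the todo strings the handlers above accept (the format follows
# directly from the parsing in exec_todo; the concrete ids and tag name are
# hypothetical):
#
#   vote_17_up         -> upvote SubredditTag 17
#   vote_17_down       -> downvote SubredditTag 17
#   tag_4_night_mode   -> tag Subreddit 4 as 'night_mode'
#
# A client form therefore posts e.g. todo='vote_17_up' plus the matching
# token for id 17.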
def cleandb(self):
    for k in self._keywords():
        if k.accessed < (datetime.utcnow() - timedelta(days=30)):
            # Keyword untouched for 30 days: drop it and everything attached.
            session.query(Mention)\
                .filter(Mention.keyword_uid == k.uid)\
                .delete(synchronize_session=False)
            session.query(Monitoring)\
                .filter(Monitoring.keyword_uid == k.uid)\
                .delete(synchronize_session=False)
            session.query(Keyword)\
                .filter(Keyword.uid == k.uid)\
                .delete(synchronize_session=False)
        elif len(k.mentions) > 50:
            # Keep only the 50 newest mentions per keyword.
            m = session.query(Mention)\
                .filter(Mention.keyword_uid == k.uid)\
                .order_by(Mention.created.desc())\
                .offset(49).first()
            session.query(Mention)\
                .filter(Mention.keyword_uid == k.uid)\
                .filter(Mention.uid < m.uid)\
                .delete(synchronize_session=False)
    session.commit()
def logos(request, view="random"): per_page = 30 query = session.query(Subreddit).filter(Subreddit.logo == True) if view == "all": page = int(request.args.get("page", 1)) query = query.filter(Subreddit.over18 == False).order_by(Subreddit.subscribers.desc()) pagination = Pagination(query, per_page, page, "logos") return serve_response("logos.html", pagination=pagination, view="all") elif view == "over18": page = int(request.args.get("page", 1)) query = query.filter(Subreddit.over18 == True).order_by(Subreddit.subscribers.desc()) pagination = Pagination(query, per_page, page, "logos") return serve_response("logos.html", pagination=pagination, view="over18") else: logos = ( query.filter(Subreddit.over18 == False) .filter(Subreddit.subscribers > 100) .order_by(sql_random()) .limit(per_page) .all() ) return serve_response("logos.html", logos=logos, view="random")
def get_new(self):
    new_reddits = 0
    first_uri = 'http://www.reddit.com/reddits/new/.json'
    current_uri = first_uri
    counter = 0
    while True:
        page = self._get_json(current_uri)
        if not page:
            self.logger.error('ERROR retrieving page %s. Spidering aborted.\n'
                              '%s reddits scanned.\n%s new reddits found.'
                              % (current_uri, counter, new_reddits))
            return
        reddits = page['data']['children']
        for reddit in reddits:
            reddit = reddit['data']
            id = reddit['id']
            s = session.query(Subreddit).filter_by(id=id).first()
            if not s:
                self.logger.info('new subreddit: %s' % reddit['url'])
                new_reddits += 1
                s = Subreddit()
                s.name = reddit['name']
                s.created = unix_string(int(reddit['created']))
                # reddit['url'] looks like '/r/name/'; keep only 'name'.
                s.url = reddit['url'][3:-1]
                s.title = reddit['title']
                s.over18 = reddit['over18']
                s.subscribers = reddit['subscribers']
                s.id = reddit['id']
                s.description = reddit['description']
                session.commit()
        counter += len(reddits)
        # Follow reddit's listing pagination until 'after' runs out.
        after = page['data']['after']
        current_uri = '%s?count=%s&after=%s' % (first_uri, counter, after)
        if not after:
            self.logger.info('Finished spidering.\n'
                             '%s reddits scanned.\n%s new reddits found.'
                             % (counter, new_reddits))
            return
def reddits(request, view="cloud", filter="biggest"): key = _hash("view_reddits_" + filter + view + ";".join(["%s:%s" % (k, v) for k, v in request.args.items()])) value = cache.get(key) if value is not None: return value subscribers = request.args.get("s") or 100 page = int(request.args.get("page", 1)) query = session.query(Subreddit).filter(Subreddit.subscribers >= subscribers).filter(Subreddit.fp_submissions > 0) if filter == "new": query = query.filter(Subreddit.created > days_ago(90)) elif filter == "biggest": query = query.filter(Subreddit.subscribers > 10000) elif filter == "active": query = ( query.filter(Subreddit.fp_submissions == 50) .filter(Subreddit.all_age_latest <= 7) .filter(Subreddit.over18 == False) ) elif filter == "over18": query = query.filter(Subreddit.all_age_latest < 91).filter(Subreddit.over18 == True) elif filter == "inactive": query = query.filter(Subreddit.all_age_latest < 360).filter(Subreddit.all_age_latest > 90) elif filter == "dead": query = query.filter(Subreddit.all_age_latest > 360) elif filter == "self": query = query.filter(Subreddit.selfposts > 20) elif filter == "media": query = query.filter(Subreddit.fp_media > 20) elif filter == "filter": q = request.args.get("q") t = request.args.get("t") l = request.args.get("l") or 1 o = request.args.get("o") if q and len(q) > 2: query = query.filter( or_( Subreddit.title.ilike("%" + q + "%"), Subreddit.url.ilike("%" + q + "%"), Subreddit.description.ilike("%" + q + "%"), ) ) if t: tag = session.query(Tag).filter(Tag.name == t).first() query = query.filter(Subreddit.tags.any(SubredditTag.tag == tag)) query = query.filter(Subreddit.all_age_latest < l) if o and o != "all": over18 = True if o == "True" else False query = query.filter(Subreddit.over18 == o) else: query = ( query.filter(Subreddit.fp_submissions == 25) .filter(Subreddit.all_age_latest == 0) .filter(Subreddit.over18 == False) ) if view == "cloud": subreddits = query.order_by(Subreddit.url).all() response = serve_response( "reddits.html", view=view, filter=filter, subreddits=subreddits, querystring=request.args ) elif view == "list": query = query.order_by(Subreddit.subscribers.desc()) pagination = Pagination(query, 50, page, "reddits") response = serve_response( "reddits.html", view=view, filter=filter, pagination=pagination, querystring=request.args ) if not "response" in locals(): raise NotFound cache.set(key, response, 3600) return response
def _keywords(self):
    return session.query(Keyword).order_by(Keyword.keyword.asc()).all()
def subreddit(request, name):
    subreddit = session.query(Subreddit).filter(
        Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound()
    return serve_response('api/subreddit.html', subreddit=subreddit)
def total_reddit_count():
    return session.query(Subreddit).count()
def tag(request, name):
    tag = session.query(Tag).filter(Tag.name.ilike(name)).first()
    if not tag:
        raise NotFound()
    return serve_response('tag.html', tag=tag)
def tags(request, view='cloud'):
    tags = session.query(Tag).order_by(Tag.name)
    return serve_response('tags.html', view=view, tags=tags)
def get_user(username):
    return session.query(User).filter_by(name=username).first()
def tags(request, view="cloud"): tags = session.query(Tag).order_by(Tag.name) return serve_response("tags.html", view=view, tags=tags)
def tag(request, name):
    tag = session.query(Tag).filter(Tag.name.ilike(name)).first()
    if not tag:
        raise NotFound()
    return serve_response('api/tag.html', tag=tag)
def user(request, username):
    # Users may only view their own profile page.
    if request.user != username:
        raise NotFound()
    user = session.query(User).filter(User.name == username).first()
    return serve_response('user.html', user=user)