Example #1
0
def monitor(request):
    if request.method == "POST":
        key = request.form.get("keyword")
        if not key or len(key) < 5 or len(key) > 20:
            raise NotFound()
        keyword = session.query(Keyword).filter(Keyword.keyword.ilike(key)).first()
        if not keyword:
            chars = "1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM"
            hash = "".join([random.choice(chars) for c in range(5)])
            keyword = Keyword(key, hash)
            session.commit()
        if request.logged_in:
            user = session.query(User).filter(User.name == request.user).first()
            monitored = (
                session.query(Monitoring)
                .filter(Monitoring.user_uid == user.uid)
                .filter(Monitoring.keyword_uid == keyword.uid)
                .first()
            )
            if not monitored:
                m = Monitoring(user.uid, keyword.uid)
                session.commit()
        return redirect(url_for("monitor", hash=keyword.hash, slug=keyword.keyword))
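    # Non-POST requests: serve the monitor landing page, cached for ten minutes.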
    cachekey = _hash("views_content_monitor")
    cached = cache.get(cachekey)
    if cached:
        return cached
    keys = ["bozarking", "reddit search", "jailbait"]
    keywords = session.query(Keyword).filter(Keyword.keyword.in_(keys)).all()
    response = serve_response("monitor.html", keywords=keywords)
    cache.set(cachekey, response, 10 * 60)
    return response
Example #2
0
def monitor(request):
    if request.method == 'POST':
        key = request.form.get('keyword')
        if not key or len(key) < 5 or len(key) > 20:
            raise NotFound()
        keyword = session.query(Keyword).filter(
            Keyword.keyword.ilike(key)).first()
        if not keyword:
            chars = '1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'
            hash = ''.join([random.choice(chars) for c in range(5)])
            keyword = Keyword(key, hash)
            session.commit()
        if request.logged_in:
            user = session.query(User).filter(
                User.name == request.user).first()
            monitored = session.query(Monitoring).filter(
                Monitoring.user_uid == user.uid).filter(
                    Monitoring.keyword_uid == keyword.uid).first()
            if not monitored:
                m = Monitoring(user.uid, keyword.uid)
                session.commit()
        return redirect(
            url_for('monitor', hash=keyword.hash, slug=keyword.keyword))
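    # Non-POST requests: serve the monitor landing page, cached for ten minutes.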
    cachekey = _hash('views_content_monitor')
    cached = cache.get(cachekey)
    if cached:
        return cached
    keys = ['bozarking', 'reddit search', 'jailbait']
    keywords = session.query(Keyword).filter(Keyword.keyword.in_(keys)).all()
    response = serve_response('monitor.html', keywords=keywords)
    cache.set(cachekey, response, 10 * 60)
    return response
Example #3
0
def remove_monitoring(request, hash):
    user = session.query(User).filter(User.name==request.user).first()
    keyword = session.query(Keyword).filter(Keyword.hash==hash).first()
    if not user or not keyword:
        raise Forbidden()
    session.query(Monitoring).filter(Monitoring.user_uid==user.uid).filter(Monitoring.keyword_uid==keyword.uid).delete(synchronize_session=False)
    session.commit()
    return serve_json('')
Example #4
0
def remove_monitoring(request, hash):
    user = session.query(User).filter(User.name == request.user).first()
    keyword = session.query(Keyword).filter(Keyword.hash == hash).first()
    if not user or not keyword:
        raise Forbidden()
    session.query(Monitoring).filter(Monitoring.user_uid == user.uid).filter(
        Monitoring.keyword_uid == keyword.uid).delete(
            synchronize_session=False)
    session.commit()
    return serve_json('')
Example #5
0
def autocomplete(request, name):
    query = request.args.get('q')
    if not query:
        raise NotFound
    if name == 'tags':
        res = session.query(Tag).filter(Tag.name.ilike(query+'%')).limit(10)
        suggestions = '\n'.join(r.name for r in res)
    elif name == 'reddits':
        res = session.query(Subreddit).filter(Subreddit.url.ilike(query+'%')).order_by(Subreddit.subscribers.desc()).limit(10)
        suggestions = '\n'.join(r.url for r in res)
    else:
        raise NotFound
    return serve_text(suggestions)
Example #6
0
def autocomplete(request, name):
    query = request.args.get('q')
    if not query:
        raise NotFound
    if name == 'tags':
        res = session.query(Tag).filter(Tag.name.ilike(query + '%')).limit(10)
        suggestions = '\n'.join(r.name for r in res)
    elif name == 'reddits':
        res = session.query(Subreddit).filter(
            Subreddit.url.ilike(query + '%')).order_by(
                Subreddit.subscribers.desc()).limit(10)
        suggestions = '\n'.join(r.url for r in res)
    else:
        raise NotFound
    return serve_text(suggestions)
Example #7
0
def index(request):
    subreddits = session.query(Subreddit)\
                    .filter(Subreddit.subscribers>100)\
                    .filter(Subreddit.fp_submissions==50)\
                    .filter(Subreddit.all_age_latest<31)\
                    .filter(Subreddit.over18==False)\
                    .order_by(sql_random()).limit(30).all()
    logos = session.query(Subreddit)\
            .filter(Subreddit.logo==True)\
            .filter(Subreddit.all_age_latest<90)\
            .filter(Subreddit.over18==False)\
            .filter(Subreddit.subscribers>100)\
            .order_by(sql_random()).limit(30).all()
    return serve_response('index.html', subreddits=subreddits, logos=logos)
Example #8
0
 def get(self):
     http = httplib2.Http()
     headers = self._login()
     print headers
     images = SoupStrainer('img')
     subreddits = session.query(Subreddit).filter(Subreddit.logo==None).order_by(Subreddit.subscribers.desc()).all()
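      # For every subreddit without a known logo status, fetch its front page and save any custom header image to disk.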
     for subreddit in subreddits:
         url = 'http://www.reddit.com/r/%s' % subreddit.url
         response, content = http.request(url, headers=headers)
         if response['status'] >= '500':
             self.delay.more_exp()
             print response['status'], subreddit.url
         elif response['status'] >= '400':
             subreddit.logo = False
             session.commit()
         else:
             self.delay.less()
             soup =  BeautifulSoup(content, parseOnlyThese=images)
             img_link = soup.findAll(id='header-img')[0]['src']
             if img_link == 'http://static.reddit.com/reddit.com.header.png':
                 subreddit.logo = False
             else:
                 try:
                     resp, img = http.request(img_link)
                     f = open(paths.logos + '/' + subreddit.url + '.png', "w")
                     f.write(img) 
                     f.close()
                     subreddit.logo = True
                 except:
                     print 'Saving image failed for %s.' % subreddit.url
             session.commit()
         self.delay.sleep()
Example #9
0
def subreddit(request, name):
    names = name.split('+')
    subreddits = session.query(Subreddit).filter(
        Subreddit.url.in_(names)).order_by(Subreddit.url).all()
    if not subreddits:
        raise NotFound
    return serve_response('subreddit.html', subreddits=subreddits)
Example #10
0
 def get(self):
     http = httplib2.Http()
     headers = self._login()
     print headers
     images = SoupStrainer('img')
     subreddits = session.query(Subreddit).filter(
         Subreddit.logo == None).order_by(
             Subreddit.subscribers.desc()).all()
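      # For every subreddit without a known logo status, fetch its front page and save any custom header image to disk.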
     for subreddit in subreddits:
         url = 'http://www.reddit.com/r/%s' % subreddit.url
         response, content = http.request(url, headers=headers)
         if response['status'] >= '500':
             self.delay.more_exp()
             print response['status'], subreddit.url
         elif response['status'] >= '400':
             subreddit.logo = False
             session.commit()
         else:
             self.delay.less()
             soup = BeautifulSoup(content, parseOnlyThese=images)
             img_link = soup.findAll(id='header-img')[0]['src']
             if img_link == 'http://static.reddit.com/reddit.com.header.png':
                 subreddit.logo = False
             else:
                 try:
                     resp, img = http.request(img_link)
                     f = open(paths.logos + '/' + subreddit.url + '.png',
                              "w")
                     f.write(img)
                     f.close()
                     subreddit.logo = True
                 except:
                     print 'Saving image failed for %s.' % subreddit.url
             session.commit()
         self.delay.sleep()
Example #11
0
 def analyse_one(self, name):
     reddit = session.query(Subreddit).filter(Subreddit.url.ilike(name)).first()
     new = self._analyse(reddit.url)
     if not new:
         return
     self._set_new(reddit, new)
     return
Example #12
0
def cachedproxy(request, name):
    cached = cache.get('submissions_' + name)
    if cached:
        return cached
    subreddit = session.query(Subreddit).filter(
        Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound
    http = httplib2.Http()
    uri = 'http://www.reddit.com/r/%s/.json?limit=5' % name
    response, content = http.request(uri, 'GET')
    if response['status'] == '200':
        out = json.loads(content)
        if out:
            out = out['data']['children']
        if not out:
            r = serve_json('there doesn\'t seem to be anything here.')
        else:
            subreddit.all_age_latest = unix_days_ago(
                out[0]['data']['created_utc'])
            session.commit()
            r = serve_response('api/submissions.html', submissions=out)
            cache.set('submissions_' + name, r, timeout=60 * 60)
    else:
        r = serve_json('fetching submissions failed.')
    return r
Example #13
0
 def analyse_one(self, name):
     reddit = session.query(Subreddit).filter(
         Subreddit.url.ilike(name)).first()
     new = self._analyse(reddit.url)
     if not new:
         return
     self._set_new(reddit, new)
     return
Example #14
0
def vote(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden
    return exec_todo(request, user, todo)
Example #15
0
def vote(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden
    return exec_todo(request, user, todo)
Example #16
0
 def analyse_new(self):
     reddits = session.query(Subreddit).filter(Subreddit.updated==None).order_by(Subreddit.uid.asc()).all()
     self.logger.info('Analysing %s new reddits.' % len(reddits))
     for reddit in reddits:
         new = self._analyse(reddit.url)
         if not new:
             continue
         self._set_new(reddit, new)
     return
Example #17
0
def monitor_detail(request, hash, slug=None):
    if slug is not None and slug[-4:] == ".rss":
        template = "monitor-detail.xml"
        slug = slug[:-4]
    else:
        template = "monitor-detail.html"
    keyword = session.query(Keyword).filter(Keyword.hash == hash).first()
    if keyword is None:
        raise NotFound()
    return serve_response(template, keyword=keyword)
Example #18
0
def monitor_detail(request, hash, slug=None):
    if slug is not None and slug[-4:] == '.rss':
        template = 'monitor-detail.xml'
        slug = slug[:-4]
    else:
        template = 'monitor-detail.html'
    keyword = session.query(Keyword).filter(Keyword.hash == hash).first()
    if keyword is None:
        raise NotFound()
    return serve_response(template, keyword=keyword)
Example #19
0
 def analyse_new(self):
     reddits = session.query(Subreddit).filter(
         Subreddit.updated == None).order_by(Subreddit.uid.asc()).all()
     self.logger.info('Analysing %s new reddits.' % len(reddits))
     for reddit in reddits:
         new = self._analyse(reddit.url)
         if not new:
             continue
         self._set_new(reddit, new)
     return
Example #20
0
def fix_relative_links():
    import re
    from myapp.env import session
    from myapp.models import Subreddit

    make_app()
    reddits = session.query(Subreddit).filter(Subreddit.description.ilike("%]\(%")).all()
    for r in reddits:
        print r.url
        r.description = re.sub(r"\]\\\(", "](", r.description)
        session.commit()
Example #21
0
def fix_relative_links():
    import re
    from myapp.env import session
    from myapp.models import Subreddit
    make_app()
    reddits = session.query(Subreddit).filter(
        Subreddit.description.ilike('%]\(%')).all()
    for r in reddits:
        print r.url
        r.description = re.sub(r'\]\\\(', '](', r.description)
        session.commit()
Example #22
0
def add_tag(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden
    name = '_'.join(todo.split('_')[2:])
    if len(name) > 20 or re.search(disallowed_chars, name):
        raise Forbidden
    return exec_todo(request, user, todo)
Example #23
0
 def analyse_all(self):
     empty = {'ups':None, 'downs':None, 'comments':None, 'media':None, 
              'submissions':None, 'selfposts':None, 'oldest':0, 'latest':0}
     reddits = session.query(Subreddit).order_by(Subreddit.uid.asc()).all()
     self.logger.info('Analysing all (%s) reddits.' % len(reddits))
     for reddit in reddits:
         new = self._analyse(reddit.url)
         if new:
             self._set_new(reddit, new)
         else:
             self._set_new(reddit, empty)
     return
Example #24
0
def add_tag(request):
    user = session.query(User).filter_by(name=request.user).first()
    todo = request.form.get('todo')
    token = request.form.get('token')
    if not (todo and token):
        raise NotFound
    if not is_valid_token(todo.split('_')[1], token):
        raise Forbidden
    name = '_'.join(todo.split('_')[2:])
    if len(name) > 20 or re.search(disallowed_chars, name):
        raise Forbidden
    return exec_todo(request, user, todo)
Example #25
0
def index(request):
    subreddits = (
        session.query(Subreddit)
        .filter(Subreddit.subscribers > 100)
        .filter(Subreddit.fp_submissions == 50)
        .filter(Subreddit.all_age_latest < 31)
        .filter(Subreddit.over18 == False)
        .order_by(sql_random())
        .limit(30)
        .all()
    )
    logos = (
        session.query(Subreddit)
        .filter(Subreddit.logo == True)
        .filter(Subreddit.all_age_latest < 90)
        .filter(Subreddit.over18 == False)
        .filter(Subreddit.subscribers > 100)
        .order_by(sql_random())
        .limit(30)
        .all()
    )
    return serve_response("index.html", subreddits=subreddits, logos=logos)
Example #26
0
def exec_todo(request, user, todo):
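    # 'todo' encodes an action as "<type>_<id>_<action>", e.g. "vote_<subreddit_tag_uid>_up" or "tag_<subreddit_id>_<tagname>".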
    r = todo.split('_')
    type = r[0]
    id = r[1]
    action = '_'.join(r[2:])
    if type == 'vote':
        subreddit_tag = session.query(SubredditTag).filter(
            SubredditTag.uid == id).first()
        if not subreddit_tag:
            raise NotFound
        vote = session.query(Vote).filter(Vote.subreddit_tag_id == id).filter(
            Vote.user_id == user.uid).first()
        if not vote:
            vote = Vote(subreddit_tag.uid, user.uid)
        if action == 'up':
            vote.up()
        elif action == 'down':
            vote.down()
        else:
            raise NotFound
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    elif type == 'tag':
        subreddit = session.query(Subreddit).filter_by(id=id).first()
        tag = session.query(Tag).filter_by(name=action).first()
        if not tag:
            tag = Tag()
            tag.user_id = user.uid
            tag.name = action
            session.commit()
        #see if this reddit is tagged already
        subreddit_tag = session.query(SubredditTag)\
                                .filter(SubredditTag.tag_id==tag.uid)\
                                .filter(SubredditTag.subreddit_id==subreddit.uid).first()
        if subreddit_tag:
            #upvote
            vote = session.query(Vote)\
                            .filter(Vote.subreddit_tag_id==subreddit_tag.uid)\
                            .filter(Vote.user_id==user.uid).first()
            if not vote:
                vote = Vote(subreddit_tag.uid, user.uid)
            vote.up()
            session.commit()
            return serve_response('api/bigtag.html', tag=subreddit_tag)
        subreddit_tag = SubredditTag()
        subreddit_tag.tag_id = tag.uid
        subreddit_tag.subreddit_id = subreddit.uid
        subreddit_tag.user_id = user.uid
        subreddit.tags.append(subreddit_tag)
        session.commit()
        vote = Vote(subreddit_tag.uid, user.uid)
        vote.up()
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    else:
        raise NotFound
Example #27
0
def exec_todo(request, user, todo):
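    # 'todo' encodes an action as "<type>_<id>_<action>", e.g. "vote_<subreddit_tag_uid>_up" or "tag_<subreddit_id>_<tagname>".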
    r = todo.split('_')
    type = r[0]
    id = r[1]
    action = '_'.join(r[2:])
    if type == 'vote':
        subreddit_tag = session.query(SubredditTag).filter(SubredditTag.uid==id).first()
        if not subreddit_tag:
            raise NotFound
        vote = session.query(Vote).filter(Vote.subreddit_tag_id==id).filter(Vote.user_id==user.uid).first()
        if not vote:
            vote = Vote(subreddit_tag.uid, user.uid)
        if action == 'up':
            vote.up()
        elif action == 'down':
            vote.down()
        else:
            raise NotFound
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    elif type == 'tag':
        subreddit = session.query(Subreddit).filter_by(id=id).first()
        tag = session.query(Tag).filter_by(name=action).first()
        if not tag:
            tag = Tag()
            tag.user_id = user.uid
            tag.name = action
            session.commit()
        #see if this reddit is tagged already
        subreddit_tag = session.query(SubredditTag)\
                                .filter(SubredditTag.tag_id==tag.uid)\
                                .filter(SubredditTag.subreddit_id==subreddit.uid).first()
        if subreddit_tag:
            #upvote
            vote = session.query(Vote)\
                            .filter(Vote.subreddit_tag_id==subreddit_tag.uid)\
                            .filter(Vote.user_id==user.uid).first()
            if not vote:
                vote = Vote(subreddit_tag.uid, user.uid)
            vote.up()
            session.commit()
            return serve_response('api/bigtag.html', tag=subreddit_tag)
        subreddit_tag = SubredditTag()
        subreddit_tag.tag_id = tag.uid
        subreddit_tag.subreddit_id = subreddit.uid
        subreddit_tag.user_id = user.uid
        subreddit.tags.append(subreddit_tag)
        session.commit()
        vote = Vote(subreddit_tag.uid, user.uid)
        vote.up()
        session.commit()
        return serve_response('api/bigtag.html', tag=subreddit_tag)
    else:
        raise NotFound
Example #28
0
 def cleandb(self):
     for k in self._keywords():
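          # Purge keywords not accessed for 30 days (with their mentions and monitorings); otherwise keep only the 50 newest mentions per keyword.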
         if k.accessed < (datetime.utcnow() - timedelta(30)):
             session.query(Mention)\
                 .filter(Mention.keyword_uid==k.uid)\
                 .delete(synchronize_session=False)
             session.query(Monitoring)\
                 .filter(Monitoring.keyword_uid==k.uid)\
                 .delete(synchronize_session=False)
             session.query(Keyword)\
                 .filter(Keyword.uid==k.uid)\
                 .delete(synchronize_session=False)
         elif len(k.mentions) > 50:
             m = session.query(Mention)\
                     .filter(Mention.keyword_uid==k.uid)\
                     .order_by(Mention.created.desc())\
                     .offset(49).first()
             session.query(Mention)\
                 .filter(Mention.keyword_uid==k.uid)\
                 .filter(Mention.uid<m.uid)\
                 .delete(synchronize_session=False)
     session.commit()
Example #29
0
 def cleandb(self):
     for k in self._keywords():
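          # Purge keywords not accessed for 30 days (with their mentions and monitorings); otherwise keep only the 50 newest mentions per keyword.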
         if k.accessed < (datetime.utcnow() - timedelta(30)):
             session.query(Mention)\
                 .filter(Mention.keyword_uid==k.uid)\
                 .delete(synchronize_session=False)
             session.query(Monitoring)\
                 .filter(Monitoring.keyword_uid==k.uid)\
                 .delete(synchronize_session=False)
             session.query(Keyword)\
                 .filter(Keyword.uid==k.uid)\
                 .delete(synchronize_session=False)
         elif len(k.mentions) > 50:
             m = session.query(Mention)\
                     .filter(Mention.keyword_uid==k.uid)\
                     .order_by(Mention.created.desc())\
                     .offset(49).first()
             session.query(Mention)\
                 .filter(Mention.keyword_uid==k.uid)\
                 .filter(Mention.uid<m.uid)\
                 .delete(synchronize_session=False)
     session.commit()
Example #30
0
 def analyse_all(self):
     empty = {
         'ups': None,
         'downs': None,
         'comments': None,
         'media': None,
         'submissions': None,
         'selfposts': None,
         'oldest': 0,
         'latest': 0
     }
     reddits = session.query(Subreddit).order_by(Subreddit.uid.asc()).all()
     self.logger.info('Analysing all (%s) reddits.' % len(reddits))
     for reddit in reddits:
         new = self._analyse(reddit.url)
         if new:
             self._set_new(reddit, new)
         else:
             self._set_new(reddit, empty)
     return
Example #31
0
def logos(request, view="random"):
    per_page = 30
    query = session.query(Subreddit).filter(Subreddit.logo == True)
    if view == "all":
        page = int(request.args.get("page", 1))
        query = query.filter(Subreddit.over18 == False).order_by(Subreddit.subscribers.desc())
        pagination = Pagination(query, per_page, page, "logos")
        return serve_response("logos.html", pagination=pagination, view="all")
    elif view == "over18":
        page = int(request.args.get("page", 1))
        query = query.filter(Subreddit.over18 == True).order_by(Subreddit.subscribers.desc())
        pagination = Pagination(query, per_page, page, "logos")
        return serve_response("logos.html", pagination=pagination, view="over18")
    else:
        logos = (
            query.filter(Subreddit.over18 == False)
            .filter(Subreddit.subscribers > 100)
            .order_by(sql_random())
            .limit(per_page)
            .all()
        )
        return serve_response("logos.html", logos=logos, view="random")
Example #32
0
def logos(request, view='random'):
    per_page = 30
    query = session.query(Subreddit).filter(Subreddit.logo == True)
    if view == 'all':
        page = int(request.args.get('page', 1))
        query = query.filter(Subreddit.over18 == False).order_by(
            Subreddit.subscribers.desc())
        pagination = Pagination(query, per_page, page, 'logos')
        return serve_response('logos.html', pagination=pagination, view='all')
    elif view == 'over18':
        page = int(request.args.get('page', 1))
        query = query.filter(Subreddit.over18 == True).order_by(
            Subreddit.subscribers.desc())
        pagination = Pagination(query, per_page, page, 'logos')
        return serve_response('logos.html',
                              pagination=pagination,
                              view='over18')
    else:
        logos = query.filter(Subreddit.over18 == False).filter(
            Subreddit.subscribers > 100).order_by(
                sql_random()).limit(per_page).all()
        return serve_response('logos.html', logos=logos, view='random')
Example #33
0
 def get_new(self):
     new_reddits = 0
     first_uri = 'http://www.reddit.com/reddits/new/.json'
     current_uri = first_uri
     counter = 0
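      # Walk reddit's /reddits/new listing page by page via the 'after' cursor, creating or updating a Subreddit row for each entry.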
     while True:
         page = self._get_json(current_uri)
         if not page:
             self.logger.error(
                 'ERROR retrieving page %s. Spidering aborted.\n'
                 '%s reddits scanned.\n%s new reddits found.' %
                 (current_uri, counter, new_reddits))
             return
         reddits = page['data']['children']
         for reddit in reddits:
             reddit = reddit['data']
             id = reddit['id']
             s = session.query(Subreddit).filter_by(id=id).first()
             if not s:
                 self.logger.info('new subreddit: %s' % reddit['url'])
                 new_reddits += 1
                 s = Subreddit()
             s.name = reddit['name']
             s.created = unix_string(int(reddit['created']))
             s.url = reddit['url'][3:-1]
             s.title = reddit['title']
             s.over18 = reddit['over18']
             s.subscribers = reddit['subscribers']
             s.id = reddit['id']
             s.description = reddit['description']
             session.commit()
         counter += len(reddits)
         after = page['data']['after']
         current_uri = '%s?count=%s&after=%s' % (first_uri, counter, after)
         if not after:
             self.logger.info('Finished spidering.\n'
                              '%s reddits scanned.\n%s new reddits found.' %
                              (counter, new_reddits))
             return
Example #34
0
 def get_new(self):
     new_reddits = 0
     first_uri = 'http://www.reddit.com/reddits/new/.json' 
     current_uri = first_uri
     counter = 0
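      # Walk reddit's /reddits/new listing page by page via the 'after' cursor, creating or updating a Subreddit row for each entry.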
     while True:
         page = self._get_json(current_uri)
         if not page:
             self.logger.error('ERROR retrieving page %s. Spidering aborted.\n'
                     '%s reddits scanned.\n%s new reddits found.' 
                     % (current_uri, counter, new_reddits))
             return
         reddits = page['data']['children']
         for reddit in reddits:
             reddit = reddit['data']
             id = reddit['id']
             s = session.query(Subreddit).filter_by(id=id).first()
             if not s: 
                 self.logger.info('new subreddit: %s' % reddit['url'])
                 new_reddits += 1
                 s = Subreddit()
             s.name = reddit['name']
             s.created = unix_string(int(reddit['created']))
             s.url = reddit['url'][3:-1]
             s.title = reddit['title']
             s.over18 = reddit['over18']
             s.subscribers = reddit['subscribers']
             s.id = reddit['id']
             s.description = reddit['description'] 
             session.commit()
         counter += len(reddits)
         after = page['data']['after']
         current_uri = '%s?count=%s&after=%s' % (first_uri, counter, after)
         if not after:
             self.logger.info('Finished spidering.\n'
                     '%s reddits scanned.\n%s new reddits found.' 
                     % (counter, new_reddits))
             return
Example #35
0
def cachedproxy(request, name):
    cached = cache.get('submissions_' + name)
    if cached:
        return cached
    subreddit = session.query(Subreddit).filter(Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound
    http = httplib2.Http()
    uri = 'http://www.reddit.com/r/%s/.json?limit=5' % name
    response, content = http.request(uri, 'GET')
    if response['status'] == '200':
        out = json.loads(content)
        if out:
            out = out['data']['children']
        if not out:
            r = serve_json('there doesn\'t seem to be anything here.')
        else:
            subreddit.all_age_latest = unix_days_ago(out[0]['data']['created_utc'])
            session.commit()
            r = serve_response('api/submissions.html', submissions=out)
            cache.set('submissions_' + name, r, timeout=60*60)
    else:
        r = serve_json('fetching submissions failed.')
    return r
Example #36
0
def reddits(request, view="cloud", filter="biggest"):
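    # Rendered responses are cached for an hour, keyed on filter, view and the request's query string.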
    key = _hash("view_reddits_" + filter + view + ";".join(["%s:%s" % (k, v) for k, v in request.args.items()]))
    value = cache.get(key)
    if value is not None:
        return value
    subscribers = request.args.get("s") or 100
    page = int(request.args.get("page", 1))
    query = session.query(Subreddit).filter(Subreddit.subscribers >= subscribers).filter(Subreddit.fp_submissions > 0)
    if filter == "new":
        query = query.filter(Subreddit.created > days_ago(90))
    elif filter == "biggest":
        query = query.filter(Subreddit.subscribers > 10000)
    elif filter == "active":
        query = (
            query.filter(Subreddit.fp_submissions == 50)
            .filter(Subreddit.all_age_latest <= 7)
            .filter(Subreddit.over18 == False)
        )
    elif filter == "over18":
        query = query.filter(Subreddit.all_age_latest < 91).filter(Subreddit.over18 == True)
    elif filter == "inactive":
        query = query.filter(Subreddit.all_age_latest < 360).filter(Subreddit.all_age_latest > 90)
    elif filter == "dead":
        query = query.filter(Subreddit.all_age_latest > 360)
    elif filter == "self":
        query = query.filter(Subreddit.selfposts > 20)
    elif filter == "media":
        query = query.filter(Subreddit.fp_media > 20)
    elif filter == "filter":
        q = request.args.get("q")
        t = request.args.get("t")
        l = request.args.get("l") or 1
        o = request.args.get("o")
        if q and len(q) > 2:
            query = query.filter(
                or_(
                    Subreddit.title.ilike("%" + q + "%"),
                    Subreddit.url.ilike("%" + q + "%"),
                    Subreddit.description.ilike("%" + q + "%"),
                )
            )
        if t:
            tag = session.query(Tag).filter(Tag.name == t).first()
            query = query.filter(Subreddit.tags.any(SubredditTag.tag == tag))
        query = query.filter(Subreddit.all_age_latest < l)
        if o and o != "all":
            over18 = True if o == "True" else False
            query = query.filter(Subreddit.over18 == over18)
    else:
        query = (
            query.filter(Subreddit.fp_submissions == 25)
            .filter(Subreddit.all_age_latest == 0)
            .filter(Subreddit.over18 == False)
        )
    if view == "cloud":
        subreddits = query.order_by(Subreddit.url).all()
        response = serve_response(
            "reddits.html", view=view, filter=filter, subreddits=subreddits, querystring=request.args
        )
    elif view == "list":
        query = query.order_by(Subreddit.subscribers.desc())
        pagination = Pagination(query, 50, page, "reddits")
        response = serve_response(
            "reddits.html", view=view, filter=filter, pagination=pagination, querystring=request.args
        )
    if not "response" in locals():
        raise NotFound
    cache.set(key, response, 3600)
    return response
Example #37
0
 def _keywords(self):
     keywords = session.query(Keyword).order_by(Keyword.keyword.asc()).all()
     return keywords
Example #38
0
def reddits(request, view='cloud', filter='biggest'):
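    # Rendered responses are cached for an hour, keyed on filter, view and the request's query string.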
    key = _hash('view_reddits_' + filter + view +
                ';'.join(['%s:%s' % (k, v) for k, v in request.args.items()]))
    value = cache.get(key)
    if value is not None:
        return value
    subscribers = request.args.get('s') or 100
    page = int(request.args.get('page', 1))
    query = session.query(Subreddit).filter(
        Subreddit.subscribers >= subscribers).filter(
            Subreddit.fp_submissions > 0)
    if filter == 'new':
        query = query.filter(Subreddit.created > days_ago(90))
    elif filter == 'biggest':
        query = query.filter(Subreddit.subscribers > 10000)
    elif filter == 'active':
        query = query.filter(Subreddit.fp_submissions == 50).filter(
            Subreddit.all_age_latest <= 7).filter(Subreddit.over18 == False)
    elif filter == 'over18':
        query = query.filter(Subreddit.all_age_latest < 91).filter(
            Subreddit.over18 == True)
    elif filter == 'inactive':
        query = query.filter(Subreddit.all_age_latest < 360).filter(
            Subreddit.all_age_latest > 90)
    elif filter == 'dead':
        query = query.filter(Subreddit.all_age_latest > 360)
    elif filter == 'self':
        query = query.filter(Subreddit.selfposts > 20)
    elif filter == 'media':
        query = query.filter(Subreddit.fp_media > 20)
    elif filter == 'filter':
        q = request.args.get('q')
        t = request.args.get('t')
        l = request.args.get('l') or 1
        o = request.args.get('o')
        if q and len(q) > 2:
            query = query.filter(
                or_(Subreddit.title.ilike('%' + q + '%'),
                    Subreddit.url.ilike('%' + q + '%'),
                    Subreddit.description.ilike('%' + q + '%')))
        if t:
            tag = session.query(Tag).filter(Tag.name == t).first()
            query = query.filter(Subreddit.tags.any(SubredditTag.tag == tag))
        query = query.filter(Subreddit.all_age_latest < l)
        if o and o != 'all':
            over18 = True if o == 'True' else False
            query = query.filter(Subreddit.over18 == over18)
    else:
        query = query.filter(Subreddit.fp_submissions==25)\
                         .filter(Subreddit.all_age_latest==0)\
                         .filter(Subreddit.over18==False)
    if view == 'cloud':
        subreddits = query.order_by(Subreddit.url).all()
        response = serve_response('reddits.html',
                                  view=view,
                                  filter=filter,
                                  subreddits=subreddits,
                                  querystring=request.args)
    elif view == 'list':
        query = query.order_by(Subreddit.subscribers.desc())
        pagination = Pagination(query, 50, page, 'reddits')
        response = serve_response('reddits.html',
                                  view=view,
                                  filter=filter,
                                  pagination=pagination,
                                  querystring=request.args)
    if 'response' not in locals():
        raise NotFound
    cache.set(key, response, 3600)
    return response
Example #39
0
def subreddit(request, name):
    subreddit = session.query(Subreddit).filter(Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound
    return serve_response('api/subreddit.html', subreddit=subreddit)
Example #40
0
 def _keywords(self):
     keywords = session.query(Keyword).order_by(Keyword.keyword.asc()).all()
     return keywords
Example #41
0
def total_reddit_count():
    return session.query(Subreddit).count()
Example #42
0
def tag(request, name):
    tag = session.query(Tag).filter(Tag.name.ilike(name)).first()
    if not tag:
        raise NotFound
    return serve_response('tag.html', tag=tag)
Example #43
0
def tags(request, view='cloud'):
    tags = session.query(Tag).order_by(Tag.name)
    return serve_response('tags.html', view=view, tags=tags)
Example #44
0
def get_user(username):
    return session.query(User).filter_by(name=username).first()
Example #45
0
def tags(request, view="cloud"):
    tags = session.query(Tag).order_by(Tag.name)
    return serve_response("tags.html", view=view, tags=tags)
Example #46
0
def tag(request, name):
    tag = session.query(Tag).filter(Tag.name.ilike(name)).first()
    if not tag:
        raise NotFound
    return serve_response('api/tag.html', tag=tag)
Example #47
0
def user(request, username):
    if request.user != username:
        raise NotFound()
    user = session.query(User).filter(User.name == username).first()
    return serve_response("user.html", user=user)
Example #48
0
def subreddit(request, name):
    subreddit = session.query(Subreddit).filter(
        Subreddit.url.ilike(name)).first()
    if not subreddit:
        raise NotFound
    return serve_response('api/subreddit.html', subreddit=subreddit)
Example #49
0
def subreddit(request, name):
    names = name.split("+")
    subreddits = session.query(Subreddit).filter(Subreddit.url.in_(names)).order_by(Subreddit.url).all()
    if not subreddits:
        raise NotFound
    return serve_response("subreddit.html", subreddits=subreddits)
Example #50
0
def user(request, username):
    if request.user != username:
        raise NotFound()
    user = session.query(User).filter(User.name == username).first()
    return serve_response('user.html', user=user)