Example #1
File: user.py Project: embr/mwstats
    def chars_deleted(self, start, end, ns=(0,), rq=False):
        # Compute each metric only on a cache miss.  Passing the computed
        # value as cache.get()'s default would run the expensive call even
        # on a hit.  self.user_name is an assumption about the owning
        # class; the excerpt never defines user_name.
        contribs_metric = Contribs()
        contribs = cache.get(str(contribs_metric))
        if contribs is None:
            contribs = contribs_metric(self.user_name, cache)
        diff_metric = Diffs(rq)
        diffs = cache.get(str(diff_metric))
        if diffs is None:
            diffs = diff_metric(self.user_name, cache)

        # Sum the length of every deleted ('old') span across the user's
        # contributions in the requested namespaces.
        total = 0
        for contrib in contribs:
            if contrib['ns'] not in ns:
                continue
            diff = diffs[contrib['revid']]
            for op in diff:
                total += len(op['old'])
        return total
Example #2
    def decorator(data, *args, **kwargs):
        # If SPA-token enforcement is disabled in the server config, call
        # the wrapped handler directly.
        if not base_config.options.server('secure_frontend_requests'):
            return func(data, *args, **kwargs)

        spa_token = request.cookies.get(COOKIE_SPA_TOKEN)

        # Reject the request unless the cookie's token appears in the
        # cache of currently valid SPA tokens.
        if not cache.get(util.cache.format_key(util.cache.TAG_SPA_TOKEN, spa_token)):
            return util.response.error(
                status_code=403,
                message='Client context requirements not fulfilled.',
                failure='failure_bad_client',
            )

        return func(data, *args, **kwargs)
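
This snippet is only the inner function of a decorator: `func`, `base_config`, `request`, and `COOKIE_SPA_TOKEN` all come from enclosing scope. A minimal sketch of the outer wrapper presumably surrounding it (the name `require_spa_token` is hypothetical) could look like:

import functools

def require_spa_token(func):
    # Hypothetical outer wrapper; the example above is the inner
    # `decorator` function, which closes over `func`.
    @functools.wraps(func)
    def decorator(data, *args, **kwargs):
        # ...SPA-token check from the example goes here...
        return func(data, *args, **kwargs)
    return decorator

@require_spa_token
def handler(data):
    return data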
Example #3
    def get(self):
        args = commentParser.parse_args()
        title = args['title']

        if title is None:
            abort(400)

        # Serve the comments from the cache when possible.
        comments = cache.get(title + "_comment")
        if comments is not None:
            return comments_serialize(comments)

        # Cache miss: load from the database, then populate the cache.
        news = News.objects(title=title).only('comments').first()
        if news is None:
            abort(400)
        cache.set(title + "_comment", news.comments, timeout=360000)

        return comments_serialize(news.comments)
Example #4
    def get(self):
        args = newsParser.parse_args()
        title = args['title']

        if title is None:
            abort(400)

        # Serve the article from the cache when possible.
        news = cache.get(title)
        if news is not None:
            return news_serialize(news)

        # Cache miss: load from the database, bump the view counter, and
        # cache the result briefly.  Note that with this approach the view
        # counter only advances on cache misses.
        news = News.objects(title=title).exclude('comments').first()
        if news is None:
            abort(400)
        news.update(inc__news_views=1)
        cache.set(title, news, timeout=60)

        return news_serialize(news)
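
Examples #3 and #4 are both instances of the cache-aside pattern: try the cache, fall back to the database, then populate the cache. A minimal generic sketch of that pattern (the helper name `cached_fetch` and its parameters are ours, assuming a werkzeug/Flask-Caching-style cache with get/set):

def cached_fetch(cache, key, load, timeout=60):
    # Cache-aside: return the cached value when present; otherwise compute
    # it via load(), store it with the given timeout, and return it.
    value = cache.get(key)
    if value is None:
        value = load()
        cache.set(key, value, timeout=timeout)
    return value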
Example #5
import gzip
import StringIO
import urllib2

# Assumes module-level USER_AGENT, DEBUG, and cache objects.
def wget(url, referer='', useCache=False, numTries=1, includeHeaders=False):
    """
    @param url URL to download.
    @param referer Defaults to ''.  An empty referer is replaced with the
        target URL itself.
    @param useCache Whether a cached copy is acceptable (and whether the
        result should be cached).
    @param numTries Number of attempts to make if an error occurs.
    @param includeHeaders When true, the return value is
        `(data, responseHeaders)`; when false, it is simply `data`.
    """
    if includeHeaders:
        urlKey = 'headers+%s' % url
    else:
        urlKey = url

    if useCache:
        cached = cache.get(urlKey)
        if cached is not None:
            return cached

    if referer == '':
        referer = url
    opener = urllib2.build_opener()
    opener.addheaders = [
        ('User-Agent', USER_AGENT),
        ('Referer', referer),
        ('Accept-Encoding', 'gzip'),
    ]

    try:
        if DEBUG: print 'getting %s' % url

        response = opener.open(url)
        data = response.read()
        headers = response.info()
        # Make sure that anything containing uppercased keys has the equivalent
        # lower-cased version set.
        for k in headers:
            kLower = k.lower()
            if k != kLower:
                headers[kLower] = headers[k]

        # Attempt gzip extraction.  If it fails, the response simply
        # wasn't compressed.
        if 'content-encoding' in headers and 'gz' in headers['content-encoding'].lower():
            print 'ATTEMPTING DECOMPRESSION'
            try:
                compressedstream = StringIO.StringIO(data)
                gzipper = gzip.GzipFile(fileobj=compressedstream)
                data = gzipper.read()
            except IOError, e:
                print '[warn] wget - could not decompress response for url={%s} with Content-Encoding value={%s}' % (url, headers['content-encoding'])
                print e

        if includeHeaders:
            out = (data, headers)
        else:
            out = data

        if useCache:
            # Cache under urlKey so that header-inclusive and plain
            # lookups don't collide (the read above uses urlKey as well).
            cache.set(urlKey, out)

        return out
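    except Exception:
        # Assumed completion: the excerpt ends inside the try block above,
        # so this retry, following the documented numTries parameter, is a
        # sketch rather than the original error handling.
        if numTries > 1:
            return wget(url, referer, useCache, numTries - 1, includeHeaders)
        raise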
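
For reference, a short usage sketch of wget as documented above (the URL is illustrative):

data = wget('http://example.com/', useCache=True, numTries=3)

data, headers = wget('http://example.com/', includeHeaders=True)
print headers.get('content-type')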