Example 1
import logging
import pickle
import zlib
from io import BytesIO


def _cache_fetch_large_data(cache, key, compress_large_data):
    # The chunk count is stored as a string under the unadorned key; each
    # chunk is stored under '<key>-<index>' as a single-element list.
    chunk_count = cache.get(make_cache_key(key))
    data = []

    chunk_keys = [
        make_cache_key('%s-%d' % (key, i)) for i in range(int(chunk_count))
    ]
    chunks = cache.get_many(chunk_keys)
    for chunk_key in chunk_keys:
        try:
            data.append(chunks[chunk_key][0])
        except KeyError:
            logging.debug('Cache miss for key %s.' % chunk_key)
            raise MissingChunkError

    data = b''.join(data)

    if compress_large_data:
        data = zlib.decompress(data)

    try:
        unpickler = pickle.Unpickler(BytesIO(data))
        data = unpickler.load()
    except Exception as e:
        logging.warning('Unpickle error for cache key "%s": %s.' % (key, e))
        raise

    return data
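
A minimal sketch exercising the layout this helper reads, using a dict-backed stand-in for the cache API and an identity make_cache_key (both hypothetical, standing in for the real implementations):

import pickle


def make_cache_key(key):
    # Hypothetical stand-in; the real make_cache_key namespaces keys.
    return key


class DictCache(dict):
    """Minimal dict-backed stand-in for the cache API used above."""

    def get_many(self, keys):
        return {k: self[k] for k in keys if k in self}

    def set(self, key, value, timeout=None):
        self[key] = value


cache = DictCache()
payload = pickle.dumps({'answer': 42})
half = len(payload) // 2

# Two single-element-list chunks plus the chunk count under the plain
# key: exactly the layout _cache_fetch_large_data walks.
cache.set('big-0', [payload[:half]])
cache.set('big-1', [payload[half:]])
cache.set('big', '2')

assert _cache_fetch_large_data(cache, 'big', False) == {'answer': 42}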
Example 2
import logging
from io import BytesIO

from PIL import Image


def thumbnail(file, size='400x100'):
    """
    Creates a thumbnail of an image with the specified size, returning
    the URL of the thumbnail.
    """
    x, y = [int(dim) for dim in size.split('x')]

    filename = file.name
    if '.' in filename:
        basename, format = filename.rsplit('.', 1)
        miniature = '%s_%s.%s' % (basename, size, format)
    else:
        basename = filename
        miniature = '%s_%s' % (basename, size)

    storage = file.storage

    if not storage.exists(miniature):
        try:
            file = storage.open(filename, 'rb')
            data = BytesIO(file.read())
            file.close()

            image = Image.open(data)
            # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same
            # filter under its current name.
            image.thumbnail((x, y), Image.LANCZOS)

            save_image_to_storage(image, storage, miniature)
        except (IOError, KeyError) as e:
            logging.error('Error thumbnailing image file %s and saving '
                          'as %s: %s' % (filename, miniature, e),
                          exc_info=True)
            return ""

    return storage.url(miniature)
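
A usage sketch, assuming a hypothetical Django model with an ImageField (the Screenshot model and its image field are illustrative, not from the source):

# Hypothetical model:
#   class Screenshot(models.Model):
#       image = models.ImageField(upload_to='shots')
screenshot = Screenshot.objects.get(pk=1)

# For 'shots/page.png', this reads the original from the field's storage,
# writes 'shots/page_400x100.png' there on first use, and returns its URL.
url = thumbnail(screenshot.image, size='400x100')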
Example 3
import logging
from io import BytesIO

from PIL import Image


def crop_image(file, x, y, width, height):
    """
    Crops an image at the specified coordinates and dimensions, returning the
    resulting URL of the cropped image.
    """
    filename = file.name
    storage = file.storage
    basename = filename

    if '.' in filename:
        basename = filename.rsplit('.', 1)[0]
    new_name = '%s_%d_%d_%d_%d.png' % (basename, x, y, width, height)

    if not storage.exists(new_name):
        try:
            file = storage.open(filename)
            data = BytesIO(file.read())
            file.close()

            image = Image.open(data)
            image = image.crop((x, y, x + width, y + height))

            save_image_to_storage(image, storage, new_name)
        except (IOError, KeyError) as e:
            logging.error('Error cropping image file %s at %d, %d, %d, %d '
                          'and saving as %s: %s' %
                          (filename, x, y, width, height, new_name, e),
                          exc_info=True)
            return ""

    return storage.url(new_name)
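
The same sketch applies here, with the crop box given as a top-left corner plus dimensions (same hypothetical Screenshot model as above):

screenshot = Screenshot.objects.get(pk=1)

# Crops the 200x150 region whose top-left corner is at (10, 20). For
# 'shots/page.png' the result is cached as 'shots/page_10_20_200_150.png'.
url = crop_image(screenshot.image, x=10, y=20, width=200, height=150)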
Example 4
    def process_response(self, request, response):
        """
        Handler for processing a response. Dumps the profiling information
        to the profile log file.
        """
        timedloginfo = getattr(request, '_page_timedloginfo', None)

        if timedloginfo:
            timedloginfo.done()

        if ('profiling' in request.GET
                and getattr(settings, "LOGGING_ALLOW_PROFILING", False)):

            init_profile_logger()

            from djblets.util.compat.six.moves import cStringIO as StringIO
            self.profiler.create_stats()

            # Capture the stats
            out = StringIO()
            old_stdout, sys.stdout = sys.stdout, out
            self.profiler.print_stats(1)
            sys.stdout = old_stdout

            profile_log = logging.getLogger("profile")
            profile_log.log(logging.INFO,
                            "Profiling results for %s (HTTP %s):",
                            request.path, request.method)
            profile_log.log(logging.INFO, out.getvalue().strip())

            profile_log.log(logging.INFO, '%d database queries made\n',
                            len(connection.queries))

            queries = {}
            for query in connection.queries:
                sql = reformat_sql(query['sql'])
                stack = ''.join(query['stack'][:-1])
                time = query['time']
                if sql in queries:
                    queries[sql].append((time, stack))
                else:
                    queries[sql] = [(time, stack)]

            query_times = []
            for sql, entries in six.iteritems(queries):
                time = sum(float(entry[0]) for entry in entries)
                tracebacks = '\n\n'.join(entry[1] for entry in entries)
                query_times.append(
                    (time,
                     'SQL Query profile (%d times, %.3fs average)\n%s\n\n%s\n\n'
                     % (len(entries), time / len(entries), sql, tracebacks)))

            # Log slowest queries first. A list is used rather than a dict
            # keyed on time, so queries with identical totals aren't lost.
            for time, entry in sorted(query_times,
                                      key=lambda pair: pair[0],
                                      reverse=True):
                profile_log.log(logging.INFO, entry)

        return response
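
This branch only runs when settings.LOGGING_ALLOW_PROFILING is true and the request carries a 'profiling' query parameter. The stdout swap used to capture print_stats() output also works standalone; a minimal sketch with cProfile:

import cProfile
import sys
from io import StringIO

profiler = cProfile.Profile()
profiler.runcall(sum, range(1000000))
profiler.create_stats()

# print_stats() writes to sys.stdout, so swap in a buffer around the call,
# just as the middleware does.
out = StringIO()
old_stdout, sys.stdout = sys.stdout, out
profiler.print_stats(1)
sys.stdout = old_stdout

print(out.getvalue().strip())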
Example 5
    def _render(self):
        # markdownFromFile writes encoded bytes to its output stream, so
        # buffer the result in io.BytesIO and decode it before returning.
        buffer = BytesIO()
        self.obj.file.open()
        markdown.markdownFromFile(input=self.obj.file,
                                  output=buffer,
                                  output_format='xhtml1',
                                  safe_mode='escape')
        rendered = buffer.getvalue().decode('utf-8')
        buffer.close()

        return rendered
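
The same conversion works with plain file paths. A minimal sketch against Python-Markdown 3.x, where safe_mode has been removed and 'xhtml' replaces the 'xhtml1' output format (filenames are illustrative):

import markdown

# Read Markdown from notes.md and write the rendered XHTML to notes.html.
markdown.markdownFromFile(input='notes.md',
                          output='notes.html',
                          output_format='xhtml')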
Example 6
    def encode(self, o, *args, **kwargs):
        """Encode an object as an XML document wrapped in an <rsp> element."""
        self.level = 0
        self.doIndent = False

        stream = StringIO()
        self.xml = XMLGenerator(stream, settings.DEFAULT_CHARSET)
        self.xml.startDocument()
        self.startElement("rsp")
        self.__encode(o, *args, **kwargs)
        self.endElement("rsp")
        self.xml.endDocument()
        self.xml = None

        return stream.getvalue()
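
The self.startElement/endElement calls above are the encoder's own wrappers; underneath sits the standard SAX writer. A minimal sketch of the raw xml.sax.saxutils API producing the same <rsp> envelope:

from io import StringIO
from xml.sax.saxutils import XMLGenerator

stream = StringIO()
xml = XMLGenerator(stream, 'utf-8')
xml.startDocument()
xml.startElement('rsp', {})
xml.characters('hello')
xml.endElement('rsp')
xml.endDocument()

print(stream.getvalue())
# <?xml version="1.0" encoding="utf-8"?>
# <rsp>hello</rsp>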
Example 7
import pickle
import zlib
from io import BytesIO


def _cache_store_large_data(cache, key, data, expiration, compress_large_data):
    # We store large data in the cache broken into chunks that are 1M in size.
    # To do this easily, we first pickle the data and compress it with zlib.
    # This gives us a byte string which can be chunked easily. The chunks are
    # then stored individually in the cache as single-element lists (so the
    # cache backend doesn't try to convert binary data to utf8). The number
    # of chunks needed is stored in the cache under the unadorned key.
    file = BytesIO()
    pickler = pickle.Pickler(file)
    pickler.dump(data)
    data = file.getvalue()

    if compress_large_data:
        data = zlib.compress(data)

    i = 0
    while len(data) > CACHE_CHUNK_SIZE:
        chunk = data[0:CACHE_CHUNK_SIZE]
        data = data[CACHE_CHUNK_SIZE:]
        cache.set(make_cache_key('%s-%d' % (key, i)), [chunk], expiration)
        i += 1
    cache.set(make_cache_key('%s-%d' % (key, i)), [data], expiration)

    cache.set(make_cache_key(key), '%d' % (i + 1), expiration)
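
A worked sketch of the chunk arithmetic, assuming the 1M chunk size described in the comment (the real CACHE_CHUNK_SIZE constant is defined elsewhere in the module):

CACHE_CHUNK_SIZE = 1024 * 1024  # assumed from the "1M" comment above

data = b'x' * (2 * CACHE_CHUNK_SIZE + 512 * 1024)  # a 2.5 MB payload

chunks = [data[i:i + CACHE_CHUNK_SIZE]
          for i in range(0, len(data), CACHE_CHUNK_SIZE)]

# Two full 1 MB chunks plus a 0.5 MB remainder: the helper would store
# them under 'key-0', 'key-1', 'key-2', with '3' under the plain 'key'.
print(len(chunks), [len(c) for c in chunks])  # 3 [1048576, 1048576, 524288]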