Example #1
	def __init__(self, message=None, use_compress=False):
		self.closed = False
		self.use_compress = use_compress
		self.compressor = compressors.GZipCompressor ()
		self.message = message
		self.serialized = []
		self.content_length = 0
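The snippet above assumes that compressors.GZipCompressor exposes an incremental compress()/flush() pair. For readers without the aquests/skitai source at hand, a minimal stand-in can be built on the standard zlib module; the class below is an assumption inferred from how the examples call it, not the library's actual implementation.

import zlib

class GZipCompressor:
    # Minimal sketch, assuming the compress()/flush() interface used in
    # these examples; not the library's actual code.
    def __init__(self, level=5):
        # wbits = 16 + MAX_WBITS makes zlib emit a gzip container.
        self._c = zlib.compressobj(level, zlib.DEFLATED, 16 + zlib.MAX_WBITS)

    def compress(self, data):
        # May return b"" until the compressor has buffered enough input.
        return self._c.compress(data)

    def flush(self):
        # Emit any remaining data plus the gzip trailer (CRC-32 and size).
        return self._c.flush()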
Example #2
    def compress(self, data):
        # Skip compression for small payloads; the gzip header and
        # trailer overhead outweighs the gain at or below 2 KiB.
        if len(data) <= 2048:
            return data
        f = compressors.GZipCompressor()
        data = f.compress(data) + f.flush()
        self.headers["Content-Encoding"] = "gzip"
        return data
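A hedged usage sketch of the threshold pattern above: payloads at or below 2048 bytes pass through untouched, larger ones come back gzip-compressed with the Content-Encoding header set. The Handler host class is hypothetical; only the headers dict and the 2048-byte cutoff come from the snippet.

import zlib

class Handler:
    # Hypothetical host class for the compress() method above.
    def __init__(self):
        self.headers = {}

    def compress(self, data):
        if len(data) <= 2048:
            return data  # too small to be worth the gzip overhead
        c = zlib.compressobj(5, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        data = c.compress(data) + c.flush()
        self.headers["Content-Encoding"] = "gzip"
        return data

h = Handler()
assert h.compress(b"x" * 100) == b"x" * 100  # unchanged, no header set
assert len(h.compress(b"x" * 100000)) < 100000
assert h.headers["Content-Encoding"] == "gzip"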
Example #3
    def save(self, key, content_type, content, max_age, compressed=0):
        # Nothing to do when both the memory and the disk cache are disabled.
        if self.max_memory == 0 and self.max_disk == 0:
            return

        usage = len(content)
        if usage > 10000000:  # refuse to cache anything over ~10 MB
            return

        # limits must fit the fixed-width disk header (12 + 64 chars)
        if len(str(max_age)) > 12 or len(content_type) > 64:
            return

        current_time = int(time.time())
        path, initial, fn = self.getpath(key)

        # check memory status
        with self.lock:
            current_memory = self.current_memory.get()[1]
            current_disk = self.current_disk.get()[1]
        if current_memory > self.max_memory and current_disk > self.max_disk:
            # no room in memory or on disk; run maintenance instead
            return self.maintern(initial)
        if initial not in self.files:
            self.files[initial] = {}
        else:
            with self.lock:
                last_maintern = self.maintern_times.get(initial, 0.)
            if last_maintern == 0:
                self.maintern_times[initial] = current_time
            elif current_time - last_maintern > self.maintern_interval:
                self.maintern(initial)

        # already have valid cache
        cached = self.files[initial].get(fn)
        if cached:  # already have
            return

        if not compressed and content_type.startswith("text/"):
            compressor = compressors.GZipCompressor()
            content = compressor.compress(content) + compressor.flush()
            compressed = 1
            usage = len(content)

        if current_memory <= self.max_memory:
            usage *= 1.5
            self.files[initial][fn] = (current_time, 1, usage, compressed,
                                       max_age, content_type, content)
            with self.lock:
                self.current_memory.inc(usage)
            return

        if self.max_disk:
            if not content_type:
                content_type = ""  # must be str for the %-format below
            # fixed-width header: 12 chars max_age, 1 digit flag, 64 chars type
            with open(path, "wb") as f:
                f.write(("%12s%d%64s" %
                         (max_age, compressed, content_type)).encode("utf8"))
                f.write(content)
            self.files[initial][fn] = (current_time, -1, usage, compressed,
                                       max_age)
            with self.lock:
                self.current_disk.inc(usage)
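The disk branch above writes a fixed-width text header in front of the body: 12 characters for max_age, one digit for the compressed flag, and 64 characters for the content type, 77 bytes in total. A sketch of reading such a record back, assuming exactly that layout; read_cache_record is a hypothetical helper, not part of the original class.

def read_cache_record(path):
    # Parse the fixed-width header written by save():
    # 12 chars max_age + 1 digit compressed flag + 64 chars content type.
    with open(path, "rb") as f:
        header = f.read(77).decode("utf8")
        max_age = int(header[:12])          # right-aligned, space-padded
        compressed = int(header[12])        # 0 or 1
        content_type = header[13:].strip()  # may be empty
        content = f.read()
    return max_age, compressed, content_type, content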
Example #4
    def done(self, force_close=False, upgrade_to=None, with_header=1):
        self.content_type = self.get('content-type')

        if not self.is_responsable(): return
        self._is_done = True
        if self.request.channel is None: return

        self.htime = (time.time() - self.stime) * 1000
        self.stime = time.time()  # for delivery time

        # decide whether to compress the payload and use the globbing producer
        do_optimize = True
        if upgrade_to or self.is_async_streaming():
            do_optimize = False

        connection = http_util.get_header(http_util.CONNECTION,
                                          self.request.header).lower()
        close_it = False
        way_to_compress = ""
        wrap_in_chunking = False

        if force_close:
            close_it = True
            if self.request.version == '1.1':
                self.update('Connection', 'close')
            else:
                self.delete('Connection')

        else:
            if self.request.version == '1.0':
                if connection == 'keep-alive':
                    if not self.has_key('content-length'):
                        close_it = True
                        self.update('Connection', 'close')
                    else:
                        self.update('Connection', 'keep-alive')
                else:
                    close_it = True

            elif self.request.version == '1.1':
                if connection == 'close':
                    close_it = True
                    self.update('Connection', 'close')
                if not self.has_key('transfer-encoding') and not self.has_key(
                        'content-length') and self.has_key('content-type'):
                    wrap_in_chunking = True

            else:
                # unknown close
                self.update('Connection', 'close')
                close_it = True

        if len(self.outgoing) == 0:
            self.update('Content-Length', "0")
            self.delete('transfer-encoding')
            self.delete('content-type')
            outgoing_producer = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))
            do_optimize = False

        elif len(self.outgoing) == 1 and hasattr(self.outgoing.first(),
                                                 "ready"):
            outgoing_producer = producers.composite_producer(self.outgoing)
            if wrap_in_chunking:
                self.update('Transfer-Encoding', 'chunked')
                outgoing_producer = producers.chunked_producer(
                    outgoing_producer)
            outgoing_header = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))
            self.request.channel.push_with_producer(outgoing_header)
            do_optimize = False

        elif do_optimize and not self.has_key('Content-Encoding'):
            maybe_compress = self.request.get_header("Accept-Encoding")
            if maybe_compress:
                if self.has_key("content-length"):
                    cl = int(self.get("Content-Length"))
                else:
                    cl = -1
                if cl == -1:
                    cl = self.outgoing.get_estimate_content_length()

                if 0 < cl <= UNCOMPRESS_MAX:
                    maybe_compress = ""
                elif not wrap_in_chunking and cl > ONETIME_COMPRESS_MAX:
                    # too big for memory, do not compress
                    maybe_compress = ""

            if maybe_compress:
                content_type = self.get("Content-Type")
                if content_type and (
                        content_type.startswith("text/")
                        or content_type.startswith("application/json")):
                    accept_encoding = [
                        x.strip() for x in maybe_compress.split(",")
                    ]
                    if "gzip" in accept_encoding:
                        way_to_compress = "gzip"
                    elif "deflate" in accept_encoding:
                        way_to_compress = "deflate"

            if way_to_compress:
                if self.has_key('Content-Length'):
                    self.delete("content-length")  # rebuild
                self.update('Content-Encoding', way_to_compress)

            if wrap_in_chunking:
                outgoing_producer = producers.composite_producer(self.outgoing)
                self.delete('content-length')
                self.update('Transfer-Encoding', 'chunked')
                if way_to_compress:
                    if way_to_compress == "gzip":
                        compressing_producer = producers.gzipped_producer
                    else:  # deflate
                        compressing_producer = producers.compressed_producer
                    outgoing_producer = compressing_producer(outgoing_producer)
                outgoing_producer = producers.chunked_producer(
                    outgoing_producer)
                outgoing_header = producers.simple_producer(
                    self.build_reply_header(with_header).encode("utf8"))

            else:
                self.delete('transfer-encoding')
                if way_to_compress:
                    if way_to_compress == "gzip":
                        compressor = compressors.GZipCompressor()
                    else:  # deflate
                        compressor = zlib.compressobj(6, zlib.DEFLATED)
                    cdata = b""
                    has_producer = 1
                    while 1:
                        has_producer, producer = self.outgoing.pop()
                        if not has_producer: break
                        while 1:
                            data = producer.more()
                            if not data:
                                break
                            cdata += compressor.compress(data)
                    cdata += compressor.flush()
                    self.update("Content-Length", len(cdata))
                    outgoing_producer = producers.simple_producer(cdata)
                else:
                    outgoing_producer = producers.composite_producer(
                        self.outgoing)

                outgoing_header = producers.simple_producer(
                    self.build_reply_header(with_header).encode("utf8"))

            outgoing_producer = producers.composite_producer(
                producers.fifo([outgoing_header, outgoing_producer]))

        outgoing_producer = self.log_or_not(self.request.uri,
                                            outgoing_producer, self.log)
        if do_optimize:
            outgoing_producer = producers.globbing_producer(outgoing_producer)

        # IMP: second check; the channel may have closed during push_with_producer() -> init_send()
        if self.request.channel is None: return

        if upgrade_to:
            request, terminator = upgrade_to
            self.request.channel.current_request = request
            self.request.channel.set_terminator(terminator)
        else:
            # prepare to receive a new request on the channel
            self.request.channel.current_request = None
            self.request.channel.set_terminator(b"\r\n\r\n")

        # The proxy collector and producer are tied to asynconnect and
        # relay data through the channel, so if the request stops
        # suddenly, make sure they are closed as well.
        self.die_with(self.request.collector)
        self.die_with(self.request.producer)

        logger = self.request.logger  # IMP: keep a reference in case the channel disconnects with the request
        try:
            if outgoing_producer:
                self.request.channel.push_with_producer(outgoing_producer)
            if close_it:
                self.request.channel.close_when_done()
        except:
            logger.trace()
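Two details of the compression branches above are easy to miss. The deflate path uses zlib.compressobj(6, zlib.DEFLATED), which yields zlib-wrapped deflate data (RFC 1950), the format clients expect for Content-Encoding: deflate. And when wrap_in_chunking is set, the body is framed with the HTTP/1.1 chunked transfer coding: each chunk is its size in hex, CRLF, the data, CRLF, and a zero-length chunk ends the stream. A minimal sketch of that framing, independent of the producers module:

def chunk(data):
    # One chunk: hex size, CRLF, payload, CRLF.
    return b"%x\r\n" % len(data) + data + b"\r\n"

def chunked(parts):
    # Frame an iterable of byte blocks; a zero-length chunk terminates.
    for part in parts:
        if part:
            yield chunk(part)
    yield b"0\r\n\r\n"

assert b"".join(chunked([b"hello", b" world"])) == \
    b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"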