Example #1
	def handle_request (self, handler):
		self.request = handler.request
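		# allocate a stream id for this request and register the handler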
		stream_id = self.get_new_stream_id ()
		self.add_request (stream_id, handler)
		self.asyncon.set_active (False)
		
		headers, content_encoded = handler.get_request_header ("2.0", False)
		payload = handler.get_request_payload ()
		producer = None
		if payload:
			if type (payload) is bytes:
				producer = producers.globbing_producer (
					producers.simple_producer (payload)
				)
			else:
				# multipart, grpc_producer 
				producer = producers.globbing_producer (payload)
		
		# the HEADERS frame producer is pushed first; DATA frames follow only when there is a body
		header = h2header_producer (stream_id, headers, producer, self.conn, self._clock)
		self.asyncon.push (header)
		if producer:
			payload = h2frame_producer (stream_id, 0, 1, producer, self.conn, self._clock)
			# is it proper?
			#payload = producers.ready_globbing_producer (payload)
			self.asyncon.push (payload)
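The producer objects passed around above follow a simple pull protocol: more() returns the next chunk of bytes and an empty bytes object once the data is exhausted, which is why a bytes payload is first wrapped in producers.simple_producer and then producers.globbing_producer before being framed. As a rough, hypothetical sketch of that contract (not the library's actual class):

class BytesProducer:
    # hypothetical minimal producer: more() yields chunks, b"" at end of stream
    def __init__(self, data, chunk_size=4096):
        self.data = data
        self.pos = 0
        self.chunk_size = chunk_size

    def more(self):
        chunk = self.data[self.pos:self.pos + self.chunk_size]
        self.pos += len(chunk)
        return chunk

# consumers simply pull until the producer returns an empty chunk
producer = BytesProducer(b"x" * 10000)
while True:
    data = producer.more()
    if not data:
        break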
Example #2
    def handle_response(self,
                        stream_id,
                        headers,
                        trailers,
                        producer,
                        do_optimize,
                        force_close=False):
        with self._clock:
            if self.promises:
                self.send_data()

        r = self.get_request(stream_id)
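        # fetch and clear any recorded priority for this stream; default to depends_on=0, weight=1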
        with self._clock:
            try:
                depends_on, weight = self.priorities[stream_id]
            except KeyError:
                depends_on, weight = 0, 1
            else:
                del self.priorities[stream_id]

        header_producer = h2header_producer(stream_id, headers, producer
                                            or trailers, self.conn,
                                            self._plock)
        if not producer:
            header_producer = r.response.log_or_not(r.uri, header_producer,
                                                    r.response.log)
            self.channel.push_with_producer(header_producer)

        else:
            self.channel.push_with_producer(header_producer)
            outgoing_producer = r.response.log_or_not(r.uri, producer,
                                                      r.response.log)

            if do_optimize:
                outgoing_producer = producers.globbing_producer(
                    outgoing_producer)

            outgoing_producer = h2frame_producer(stream_id, depends_on, weight,
                                                 outgoing_producer, self.conn,
                                                 self._plock, trailers)
            # is it proper?
            #outgoing_producer = producers.ready_globbing_producer (outgoing_producer)
            self.channel.push_with_producer(outgoing_producer)

        if r.is_stream_ended():
            # no need to receive any more data on this stream
            self.remove_request(stream_id)

        if force_close:
            return self.go_away(ErrorCodes.CANCEL)

        # drain queued push promises and start handling each promised stream
        current_promises = []
        with self._clock:
            while self.promises:
                current_promises.append(self.promises.popitem())
        for promise_stream_id, promise_headers in current_promises:
            self.handle_request(promise_stream_id, promise_headers)
Example #3
    def handle_response (self, stream_id, headers, trailers, producer, do_optimize, force_close = False):
        request = self.get_request (stream_id)
        if not request: # reset or canceled
            return

        with self._clock:
            try:
                depends_on, weight = self._priorities [stream_id]
            except KeyError:
                depends_on, weight = 0, 1
            else:
                del self._priorities [stream_id]

        if trailers:
            assert producer, "HTTP/2 and HTTP/3 trailers require a body"
        if producer and do_optimize:
            producer = producers.globbing_producer (producer)

        self._producers.append (self.producer_class (self.conn, self._plock, stream_id, headers, producer, trailers, depends_on, weight, request.response.maybe_log))
        self._producers.sort ()
        self.send_data ()

        force_close and self.close (self.errno.FLOW_CONTROL_ERROR)
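The snippet above defers framing to producer_class instances: one is appended per stream, the queue is re-sorted, and send_data() flushes it, so stream priority decides which frames go out first. The ordering rule lives inside producer_class; purely as an illustration of how such an object could be made sortable (the class name and the weight-based ordering below are assumptions, not the library's code):

import functools

@functools.total_ordering
class StreamProducer:
    # hypothetical stand-in for producer_class, sortable by priority weight
    def __init__(self, stream_id, depends_on, weight):
        self.stream_id = stream_id
        self.depends_on = depends_on
        self.weight = weight

    def __eq__(self, other):
        return self.weight == other.weight

    def __lt__(self, other):
        # a heavier weight gets serviced earlier, so it sorts to the front
        return self.weight > other.weight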
Example #4
    def done(self, force_close=False, upgrade_to=None, with_header=1):
        self.content_type = self.get('content-type')

        if not self.is_responsable(): return
        self._is_done = True
        if self.request.channel is None: return

        self.htime = (time.time() - self.stime) * 1000
        self.stime = time.time()  # for delivery time

        # do_optimize controls payload compression and globbing of the outgoing producer
        do_optimize = True
        if upgrade_to or self.is_async_streaming():
            do_optimize = False

        connection = http_util.get_header(http_util.CONNECTION,
                                          self.request.header).lower()
        close_it = False
        way_to_compress = ""
        wrap_in_chunking = False

        if force_close:
            close_it = True
            if self.request.version == '1.1':
                self.update('Connection', 'close')
            else:
                self.delete('Connection')

        else:
            if self.request.version == '1.0':
                if connection == 'keep-alive':
                    if not self.has_key('content-length'):
                        close_it = True
                        self.update('Connection', 'close')
                    else:
                        self.update('Connection', 'keep-alive')
                else:
                    close_it = True

            elif self.request.version == '1.1':
                if connection == 'close':
                    close_it = True
                    self.update('Connection', 'close')
                if not self.has_key('transfer-encoding') and not self.has_key(
                        'content-length') and self.has_key('content-type'):
                    wrap_in_chunking = True

            else:
                # unknown HTTP version: close the connection
                self.update('Connection', 'close')
                close_it = True

        # no response body: send the header alone with Content-Length: 0
        if len(self.outgoing) == 0:
            self.update('Content-Length', "0")
            self.delete('transfer-encoding')
            self.delete('content-type')
            outgoing_producer = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))
            do_optimize = False

        # a single 'ready' (async streaming) producer: push the header immediately and stream the body
        elif len(self.outgoing) == 1 and hasattr(self.outgoing.first(),
                                                 "ready"):
            outgoing_producer = producers.composite_producer(self.outgoing)
            if wrap_in_chunking:
                self.update('Transfer-Encoding', 'chunked')
                outgoing_producer = producers.chunked_producer(
                    outgoing_producer)
            outgoing_header = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))
            self.request.channel.push_with_producer(outgoing_header)
            do_optimize = False

        # otherwise, try to negotiate compression based on Accept-Encoding, body size and content type
        elif do_optimize and not self.has_key('Content-Encoding'):
            maybe_compress = self.request.get_header("Accept-Encoding")
            if maybe_compress:
                cl = self.has_key("content-length") and int(
                    self.get("Content-Length")) or -1
                if cl == -1:
                    cl = self.outgoing.get_estimate_content_length()

                if 0 < cl <= UNCOMPRESS_MAX:
                    maybe_compress = ""
                elif not wrap_in_chunking and cl > ONETIME_COMPRESS_MAX:
                    # too big for memory, do not compress
                    maybe_compress = ""

            if maybe_compress:
                content_type = self.get("Content-Type")
                if content_type and (
                        content_type.startswith("text/")
                        or content_type.startswith("application/json")):
                    accept_encoding = [
                        x.strip() for x in maybe_compress.split(",")
                    ]
                    if "gzip" in accept_encoding:
                        way_to_compress = "gzip"
                    elif "deflate" in accept_encoding:
                        way_to_compress = "deflate"

            if way_to_compress:
                if self.has_key('Content-Length'):
                    self.delete("content-length")  # rebuild
                self.update('Content-Encoding', way_to_compress)

            # chunked transfer: compress on the fly if negotiated, then wrap in a chunked producer
            if wrap_in_chunking:
                outgoing_producer = producers.composite_producer(self.outgoing)
                self.delete('content-length')
                self.update('Transfer-Encoding', 'chunked')
                if way_to_compress:
                    if way_to_compress == "gzip":
                        compressing_producer = producers.gzipped_producer
                    else:  # deflate
                        compressing_producer = producers.compressed_producer
                    outgoing_producer = compressing_producer(outgoing_producer)
                outgoing_producer = producers.chunked_producer(
                    outgoing_producer)
                outgoing_header = producers.simple_producer(
                    self.build_reply_header(with_header).encode("utf8"))

            # fixed-length transfer: when compressing, buffer the whole body to recompute Content-Length
            else:
                self.delete('transfer-encoding')
                if way_to_compress:
                    if way_to_compress == "gzip":
                        compressor = compressors.GZipCompressor()
                    else:  # deflate
                        compressor = zlib.compressobj(6, zlib.DEFLATED)
                    cdata = b""
                    has_producer = 1
                    while 1:
                        has_producer, producer = self.outgoing.pop()
                        if not has_producer: break
                        while 1:
                            data = producer.more()
                            if not data:
                                break
                            cdata += compressor.compress(data)
                    cdata += compressor.flush()
                    self.update("Content-Length", len(cdata))
                    outgoing_producer = producers.simple_producer(cdata)
                else:
                    outgoing_producer = producers.composite_producer(
                        self.outgoing)

                outgoing_header = producers.simple_producer(
                    self.build_reply_header(with_header).encode("utf8"))

            outgoing_producer = producers.composite_producer(
                producers.fifo([outgoing_header, outgoing_producer]))

        outgoing_producer = self.log_or_not(self.request.uri,
                                            outgoing_producer, self.log)
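        # globbing merges small chunks into fewer, larger socket writes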
        if do_optimize:
            outgoing_producer = producers.globbing_producer(outgoing_producer)

        # IMP: check the channel a second time; push_with_producer() -> init_send() may have closed it
        if self.request.channel is None: return

        if upgrade_to:
            request, terminator = upgrade_to
            self.request.channel.current_request = request
            self.request.channel.set_terminator(terminator)
        else:
            # prepare to receive a new request on this channel
            self.request.channel.current_request = None
            self.request.channel.set_terminator(b"\r\n\r\n")

        # the proxy collector and producer are tied to the asynconnect
        # and relay data through this channel; if the request is suddenly
        # stopped, make sure they are closed as well
        self.die_with(self.request.collector)
        self.die_with(self.request.producer)

        logger = self.request.logger  # IMP: keep a local reference in case the request is disconnected
        try:
            if outgoing_producer:
                self.request.channel.push_with_producer(outgoing_producer)
            if close_it:
                self.request.channel.close_when_done()
        except:
            logger.trace()
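The compression branch in done() amounts to a small negotiation: compress only when the client advertises gzip or deflate, the body is textual (text/* or application/json), and the length is neither small enough to skip nor too large to buffer when chunked transfer is unavailable. Pulled out of the response object, the decision looks roughly like the helper below (the function name and threshold values are assumptions for illustration; the real UNCOMPRESS_MAX and ONETIME_COMPRESS_MAX are module constants):

UNCOMPRESS_MAX = 2048             # assumed value; a constant in the real module
ONETIME_COMPRESS_MAX = 1048576    # assumed value; a constant in the real module

def choose_encoding(accept_encoding, content_type, content_length, chunked):
    # hypothetical helper mirroring the checks made inside done()
    if not accept_encoding:
        return ""                 # client did not offer any encoding
    if 0 < content_length <= UNCOMPRESS_MAX:
        return ""                 # too small to be worth compressing
    if not chunked and content_length > ONETIME_COMPRESS_MAX:
        return ""                 # too large to buffer in memory without chunking
    if not content_type or not (content_type.startswith("text/")
                                or content_type.startswith("application/json")):
        return ""                 # only textual payloads are compressed
    offered = [token.strip() for token in accept_encoding.split(",")]
    if "gzip" in offered:
        return "gzip"
    if "deflate" in offered:
        return "deflate"
    return ""

# e.g. choose_encoding("gzip, deflate", "application/json", 50000, True) -> "gzip"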