def handle_request (self, handler):
    # Open a fresh HTTP/2 stream for *handler*, then queue the header
    # frame and, when a request body exists, the body frames as well.
    self.request = handler.request
    stream_id = self.get_new_stream_id ()
    self.add_request (stream_id, handler)
    self.asyncon.set_active (False)

    headers, content_encoded = handler.get_request_header ("2.0", False)
    payload = handler.get_request_payload ()

    body = None
    if payload:
        # raw bytes get wrapped first; multipart / grpc_producer objects
        # are globbed directly
        if type (payload) is bytes:
            source = producers.simple_producer (payload)
        else:
            source = payload
        body = producers.globbing_producer (source)

    self.asyncon.push (h2header_producer (stream_id, headers, body, self.conn, self._clock))

    if body:
        # is it proper?
        #payload = producers.ready_globbing_producer (payload)
        self.asyncon.push (h2frame_producer (stream_id, 0, 1, body, self.conn, self._clock))
def push(self, thing):
    """Queue *thing* on the outgoing fifo.

    Raw ``bytes`` are wrapped in a ``simple_producer``; anything else is
    assumed to already be a producer.  Data is silently dropped when the
    channel can no longer respond.
    """
    if not self.is_responsable():
        return
    wrapped = producers.simple_producer(thing) if type(thing) is bytes else thing
    self.outgoing.push(wrapped)
def found_terminator(self):
    """Handle one complete reply line from the SMTP server.

    Buffered input (``self.__line``) is joined and parsed; multi-line
    replies ("250-..." continuations) accumulate in ``self.__mline``
    until the final line arrives, at which point advertised ESMTP
    features are extracted and the client state machine advances:

        0    expect 220 greeting          -> send EHLO
        1    EHLO reply                   -> login or MAIL FROM (fall back to HELO)
        2    HELO reply                   -> MAIL FROM
        2.5  AUTH challenge (334)         -> continue login
        3    AUTH result (235/503)        -> MAIL FROM
        4    MAIL FROM accepted           -> RCPT TO
        5    RCPT TO accepted (250/251)   -> DATA
        6    DATA go-ahead (354)          -> push message body
        9    body sent                    -> record result, QUIT
        else (10)                         -> close the connection

    On errors the code/response pair is stored in ``self.__code`` /
    ``self.__resp`` and the session is aborted with RSET or QUIT.
    """
    line = "".join(self.__line)
    self.__line = []
    code, _resp = self.get_reply (line)
    if code == -1:
        # unparsable reply: report a synthetic error code and give up
        self.__code, self.__resp = 801, "SMTP Server Response Error"
        self.close ()
        return

    self.__mline.append (_resp)
    if line [3:4] == "-":
        # "XXX-..." continuation line; wait for the terminating line
        return
    else:
        # final line of a reply: scan the collected lines for advertised
        # extensions (old-style "AUTH=..." and "FEATURE param" syntax)
        for each in self.__mline [1:]:
            auth_match = OLDSTYLE_AUTH.match(each)
            if auth_match:
                # multiple AUTH advertisements accumulate their mechanisms
                self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
                    + " " + auth_match.groups(0)[0]
                if self.debug:
                    print (self.esmtp_features)
                continue
            m = FEATURE.match(each)
            if m:
                feature = m.group("feature").lower()
                params = m.string[m.end("feature"):].strip()
                if feature == "auth":
                    self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
                        + " " + params
                else:
                    self.esmtp_features[feature] = params

    resp = " ".join (self.__mline)
    self.__mline = []

    if self.__stat == 0:
        # server greeting; anything but 220 aborts the session
        if code != 220:
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("quit")
            return
        self.__stat = 1
        self.push ("ehlo %s" % socket.getfqdn())

    elif self.__stat == 1:
        # EHLO rejected: retry as plain SMTP with HELO
        if not (200 <= code <= 299):
            self.__code, self.__resp = code, resp
            self.__stat = 2
            self.is_esmtp = False
            self.push ("helo %s" % socket.getfqdn())
            return
        if self.composer.get_LOGIN ():
            self.login ()
        else:
            self.__stat = 4
            self.send_from ()

    elif self.__stat == 2:
        if not (200 <= code <= 299):
            self.__code, self.__resp = code, resp
            self.__stat = 10 # not SMTP, close immediately
            return
        self.__stat = 4
        self.send_from ()

    elif self.__stat == 2.5:
        # AUTH continuation: server must answer 334 with a challenge
        if code != 334:
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("rset")
            return
        self.login (2, resp)

    elif self.__stat == 3:
        # AUTH result; 503 presumably means "already authenticated" — confirm
        if code not in (235, 503):
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("rset")
            return
        self.__stat = 4
        self.send_from ()

    elif self.__stat == 4:
        # MAIL FROM reply
        if not (200 <= code <= 299):
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("rset")
            return
        self.__stat = 5
        self.push ("rcpt TO:%s" % quoteaddr (self.composer.get_TO ()))

    elif self.__stat == 5:
        # RCPT TO reply; only 250/251 accepted
        if not (250 <= code <= 251):
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("rset")
            return
        self.__stat = 6
        self.push ("data")

    elif self.__stat == 6:
        # DATA go-ahead; push the (quoted) message body terminated by "."
        if code != 354:
            self.__code, self.__resp = code, resp
            self.__stat = 9
            self.push ("rset")
            return
        self.__stat = 9
        # NOTE(review): assumes quotedata() output ends with CRLF so the
        # trailing ".\r\n" forms the end-of-data marker — verify
        q = quotedata (self.composer.get_DATA ()) + ".\r\n"
        self.push_with_producer (producers.simple_producer (q.encode ("utf8")))
        #self.push (q.encode ("utf8"))
        self.__sent = len (q)

    elif self.__stat == 9:
        # reply to the message body (or to RSET after a failure)
        if self.__sent and code == 250:
            self.__code, self.__resp = -250, "OK"  # -250 marks delivery success
        elif self.__sent:
            self.__code, self.__resp = code, resp
        self.__stat = 10
        self.push ("quit")

    else:
        # stat 10 (or unknown state): QUIT acknowledged, drop the link
        self.handle_close ()
def done(self, force_close=False, upgrade_to=None, with_header=1):
    """Finalize the response and push it onto the channel.

    Decides connection persistence (keep-alive vs. close) from the HTTP
    version and Connection header, whether to wrap the body in chunked
    transfer-encoding, and whether/how to compress it (gzip/deflate),
    then assembles header + body producers and hands them to the channel.

    force_close -- force 'Connection: close' regardless of version
    upgrade_to  -- (request, terminator) pair for protocol upgrade; also
                   disables payload optimization
    with_header -- passed through to build_reply_header()
    """
    self.content_type = self.get('content-type')
    if not self.is_responsable():
        return
    self._is_done = True
    if self.request.channel is None:
        return
    self.htime = (time.time() - self.stime) * 1000
    self.stime = time.time()  # reset timer to measure delivery time

    # compress payload and globbing production; skipped for upgrades and
    # async streaming where the body must pass through untouched
    do_optimize = True
    if upgrade_to or self.is_async_streaming():
        do_optimize = False

    connection = http_util.get_header(http_util.CONNECTION, self.request.header).lower()
    close_it = False
    way_to_compress = ""
    wrap_in_chunking = False

    if force_close:
        close_it = True
        if self.request.version == '1.1':
            self.update('Connection', 'close')
        else:
            self.delete('Connection')
    else:
        if self.request.version == '1.0':
            # HTTP/1.0: keep-alive only when an explicit content-length exists
            if connection == 'keep-alive':
                if not self.has_key('content-length'):
                    close_it = True
                    self.update('Connection', 'close')
                else:
                    self.update('Connection', 'keep-alive')
            else:
                close_it = True
        elif self.request.version == '1.1':
            if connection == 'close':
                close_it = True
                self.update('Connection', 'close')
            # no framing headers but a body type present: use chunked encoding
            if not self.has_key('transfer-encoding') and not self.has_key(
                    'content-length') and self.has_key('content-type'):
                wrap_in_chunking = True
        else:
            # unknown HTTP version: close to be safe
            self.update('Connection', 'close')
            close_it = True

    if len(self.outgoing) == 0:
        # empty body: header-only reply
        self.update('Content-Length', "0")
        self.delete('transfer-encoding')
        self.delete('content-type')
        outgoing_producer = producers.simple_producer(
            self.build_reply_header(with_header).encode("utf8"))
        do_optimize = False

    elif len(self.outgoing) == 1 and hasattr(self.outgoing.first(), "ready"):
        # single "ready"-capable producer (async streaming): push the header
        # immediately and let the body stream afterwards, unoptimized
        outgoing_producer = producers.composite_producer(self.outgoing)
        if wrap_in_chunking:
            self.update('Transfer-Encoding', 'chunked')
            outgoing_producer = producers.chunked_producer(outgoing_producer)
        outgoing_header = producers.simple_producer(
            self.build_reply_header(with_header).encode("utf8"))
        self.request.channel.push_with_producer(outgoing_header)
        do_optimize = False

    # NOTE(review): when do_optimize is False (upgrade/streaming) and the
    # fifo holds ordinary producers, none of these branches assigns
    # outgoing_producer and log_or_not() below would raise NameError —
    # verify callers never reach that combination
    elif do_optimize and not self.has_key('Content-Encoding'):
        maybe_compress = self.request.get_header("Accept-Encoding")
        if maybe_compress:
            # NOTE(review): and/or idiom maps a literal "0" content-length
            # to -1 and falls back to the estimate — confirm intended
            cl = self.has_key("content-length") and int(
                self.get("Content-Length")) or -1
            if cl == -1:
                cl = self.outgoing.get_estimate_content_length()
            if 0 < cl <= UNCOMPRESS_MAX:
                # too small to be worth compressing
                maybe_compress = ""
            elif not wrap_in_chunking and cl > ONETIME_COMPRESS_MAX:
                # too big for memory, do not compress
                maybe_compress = ""

        if maybe_compress:
            # only compress textual content types
            content_type = self.get("Content-Type")
            if content_type and (
                    content_type.startswith("text/") or
                    content_type.startswith("application/json")):
                accept_encoding = [x.strip() for x in maybe_compress.split(",")]
                if "gzip" in accept_encoding:
                    way_to_compress = "gzip"
                elif "deflate" in accept_encoding:
                    way_to_compress = "deflate"

        if way_to_compress:
            if self.has_key('Content-Length'):
                self.delete("content-length")  # rebuild after compression
            self.update('Content-Encoding', way_to_compress)

        if wrap_in_chunking:
            # stream-compress and chunk on the fly
            outgoing_producer = producers.composite_producer(self.outgoing)
            self.delete('content-length')
            self.update('Transfer-Encoding', 'chunked')
            if way_to_compress:
                if way_to_compress == "gzip":
                    compressing_producer = producers.gzipped_producer
                else:  # deflate
                    compressing_producer = producers.compressed_producer
                outgoing_producer = compressing_producer(outgoing_producer)
            outgoing_producer = producers.chunked_producer(outgoing_producer)
            outgoing_header = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))
        else:
            self.delete('transfer-encoding')
            if way_to_compress:
                # one-shot compression: drain every producer into memory,
                # compress, and set an exact Content-Length
                if way_to_compress == "gzip":
                    compressor = compressors.GZipCompressor()
                else:  # deflate
                    compressor = zlib.compressobj(6, zlib.DEFLATED)
                cdata = b""
                has_producer = 1
                while 1:
                    has_producer, producer = self.outgoing.pop()
                    if not has_producer:
                        break
                    while 1:
                        data = producer.more()
                        if not data:
                            break
                        cdata += compressor.compress(data)
                cdata += compressor.flush()
                self.update("Content-Length", len(cdata))
                outgoing_producer = producers.simple_producer(cdata)
            else:
                outgoing_producer = producers.composite_producer(
                    self.outgoing)
            outgoing_header = producers.simple_producer(
                self.build_reply_header(with_header).encode("utf8"))

        # header first, then body, as one composite stream
        outgoing_producer = producers.composite_producer(
            producers.fifo([outgoing_header, outgoing_producer]))

    outgoing_producer = self.log_or_not(self.request.uri, outgoing_producer, self.log)
    if do_optimize:
        outgoing_producer = producers.globbing_producer(outgoing_producer)

    # IMP: second testing after push_with_producer()->init_send ()
    if self.request.channel is None:
        return

    if upgrade_to:
        # hand the channel over to the upgraded protocol handler
        request, terminator = upgrade_to
        self.request.channel.current_request = request
        self.request.channel.set_terminator(terminator)
    else:
        # prepare to receive a new request for the channel
        self.request.channel.current_request = None
        self.request.channel.set_terminator(b"\r\n\r\n")

    # proxy collector and producer are related to asynconnect
    # and relay data with the channel;
    # if the request is suddenly stopped, make sure to close them
    self.die_with(self.request.collector)
    self.die_with(self.request.producer)

    logger = self.request.logger  # IMP: for disconnect with request
    try:
        if outgoing_producer:
            self.request.channel.push_with_producer(outgoing_producer)
        if close_it:
            self.request.channel.close_when_done()
    except:
        # NOTE(review): bare except deliberately traps channel errors on a
        # dying connection and just logs the traceback
        logger.trace()