def _perform_handshake(self):
    """Perform The WebSocket Handshake.

    Reads the client's upgrade request, derives the Sec-WebSocket-Accept
    token and sends back the 101 response. On success the connection moves
    to the "authenticating" state. Any error is logged, not raised.
    """
    try:
        Log.add("Got To Handshake")
        data = self.recv(1024).strip()
        # Log.add("Data: %s" % data)
        # Drop the request line; parse only the header block.
        headers = Message(StringIO(data.split('\r\n', 1)[1]))
        Log.add("Parsed Headers:")
        # Log.add(headers)
        if headers.get('Upgrade', None) == 'websocket':
            Log.add("Attempting Handshake")
            # create response key (SHA-1 of client key + server salt, base64)
            key = b64encode(sha1(headers['Sec-WebSocket-Key'] + self.SALT).digest())
            # create response headers
            response = (
                "HTTP/1.1 101 Web Socket Protocol Handshake\r\n"
                "Upgrade: websocket\r\n"
                "Connection: Upgrade\r\n"
                "Sec-WebSocket-Origin: %s\r\n"
                "Sec-WebSocket-Accept: %s\r\n\r\n" % (headers["Origin"], key)
            )
            if self.send_bytes(response):
                Log.add("Handshake successful")
                self._assign_room(data)
                self._ready_state = "authenticating"
    except Exception as e:
        # Best-effort: handshake failures are logged, the socket stays open.
        Log.add(e.args)
def handshake(self):
    """Answer a WebSocket upgrade request, register the client and push the
    current full state ("combo" delta) to it."""
    log.debug("Request")
    raw = self.request.recv(1024).strip()
    header_block = raw.split('\r\n', 1)[1]
    headers = Message(StringIO(header_block))
    log.debug("Headers")
    if headers.get("Upgrade", None) != "websocket":
        return
    log.debug("Handshake")
    accept = b64encode(
        sha1(headers['Sec-WebSocket-Key'] + self.magic).hexdigest().decode('hex'))
    reply = (
        'HTTP/1.1 101 Switching Protocols\r\n'
        'Upgrade: websocket\r\n'
        'Connection: Upgrade\r\n'
        'Sec-WebSocket-Accept: %s\r\n\r\n' % accept
    )
    self.handshake_done = self.request.send(reply)
    global clients
    clients += [self]
    # Send over a full delta so the new client has the complete picture.
    self.send_message(json.dumps({
        "name": "combo",
        "connections": connections,
        "links": links,
    }))
def test_spamc_unix_headers(self):
    """headers() over the unix socket must leave the Subject unchanged."""
    with open(self.filename) as msg_handle:
        result = self.spamc_unix.headers(msg_handle)
    self.assertIn('message', result)
    with open(self.filename) as original:
        expected = "Subject: %s" % Message(original).get('Subject')
    actual = "Subject: %s" % result['headers'].get('Subject')
    self.assertEqual(expected, actual)
def test_spamc_tcp_process(self):
    """process() over TCP must return the message with Subject intact."""
    with open(self.filename) as src:
        buffered = StringIO(src.read())
    result = self.spamc_tcp.process(buffered)
    self.assertIn("message", result)
    with open(self.filename) as src:
        subject_before = Message(src).get("Subject")
    subject_after = Message(StringIO(result["message"])).get("Subject")
    self.assertEqual(subject_before, subject_after)
def test_spamc_unix_process(self):
    """process() over the unix socket must preserve the Subject header."""
    with open(self.filename) as src:
        result = self.spamc_unix.process(src)
    self.assertIn('message', result)
    with open(self.filename) as src:
        original_subject = Message(src).get('Subject')
    returned_subject = Message(StringIO(result['message'])).get('Subject')
    self.assertEqual(original_subject, returned_subject)
def handshake(self): data = self.request.recv(1024).strip() headers = Message(StringIO(data.split('\r\n', 1)[1])) if headers.get("Upgrade", None) != "websocket": return print 'Handshaking...' key = headers['Sec-WebSocket-Key'] digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex')) response = 'HTTP/1.1 101 Switching Protocols\r\n'+'Upgrade: websocket\r\n'+'Connection: Upgrade\r\n' response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest self.handshake_done = self.request.send(response)
def test_spamc_tcp_headers(self):
    """headers() over TCP must leave the Subject header unchanged."""
    with open(self.filename) as src:
        payload = StringIO(src.read())
    result = self.spamc_tcp.headers(payload)
    self.assertIn("message", result)
    with open(self.filename) as src:
        subject_in = "Subject: %s" % Message(src).get("Subject")
    subject_out = "Subject: %s" % result["headers"].get("Subject")
    self.assertEqual(subject_in, subject_out)
def request_without_data(request, req_type, retries, time_sleep, timeout_sec,
                         payload, condition, output, sample_event, message,
                         log_in_file, thread_tmp_filename, language, targets,
                         ports, default_ports, socks_proxy):
    """
    Extract the headers and URL template from a raw request and fire it at
    every target (for request types other than POST).

    Args:
        request: the raw request text from __http_requests_generator
        req_type: GET, POST, PUT, DELETE or PATCH
        payload: the payload corresponding to which the request is made
        condition: the condition to be evaluated, e.g. response.status_code == 200
        other args: retries, time_sleep, timeout_sec, output, sample_event,
            message, log_in_file, thread_tmp_filename, language

    Returns:
        the ``output`` list, one dict per successful request:
        {"payload": ..., "condition": ..., "result": ..., "response": ...}
    """
    request_line, headers_alone = request.split('\r\n', 1)
    # Parse and normalise the header names/values in one pass.
    headers = {k.strip(): v for k, v in Message(StringIO(headers_alone)).dict.items()}
    # Content-Length is recomputed by the HTTP library; drop any stale value.
    headers.pop("Content-Length", None)
    url_sample = request_line.strip().split(' ')[1]
    for target in targets:
        url = url_sample.replace('__target_locat_here__', str(target))
        port = url[url.find(':', 7) + 1:url.find('/', 7)]
        response = __http_request_maker(
            req_type, url, headers, retries, time_sleep, timeout_sec)
        if isinstance(response, requests.models.Response):
            # Fix: evaluate the rule once instead of twice per response.
            matched = rule_evaluator(response, condition)
            if matched:
                __log_into_file(thread_tmp_filename, 'w', '0', language)
                sample_event['PORT'] = port
                event_parser(message, sample_event, response, payload,
                             log_in_file, language)
            output.append({
                "payload": payload,
                "condition": condition,
                "result": matched,
                "response": response
            })
    return output
def parse_headers(header_string):
    """
    Parse a raw HTTP message into a dict of headers.

    The first line (the request line) is discarded; an empty input or an
    input with no header lines yields an empty dict.
    Implementation based on: http://stackoverflow.com/a/5955949/95122
    """
    if not header_string:
        return {}
    _request_line, remainder = header_string.split('\r\n', 1)
    if not remainder:
        return {}
    return dict(Message(StringIO(remainder)).items())
def handshake(self): data = self.request.recv(1024).strip() headers = Message(StringIO(data.split('\r\n', 1)[1])) if headers.get("Upgrade", None) != "websocket": return print 'Handshaking...' key = headers['Sec-WebSocket-Key'] digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex')) response = 'HTTP/1.1 101 Switching Protocols\r\n' response += 'Upgrade: websocket\r\n' response += 'Connection: Upgrade\r\n' response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest self.handshake_done = self.request.send(response)
def parseRequestHeaders(self):
    """Read header lines from self.rin into self.request_headers.

    Stops at the blank line that ends the header block (or at EOF) and
    rejects requests with more than self.max_headers lines.
    """
    collected = []
    count = 0
    while True:
        if count >= self.max_headers:
            self.requestError(BAD_REQUEST, "Bad request (too many headers)")
        line = self.rin.readline()
        collected.append(line)
        count += 1
        if line in ('\r\n', '\n', ''):
            break
    self.request_headers = Message(StringIO("".join(collected)))
def handshake(self, data):
    """Validate the upgrade request in *data* and buffer the 101 response.

    Returns True when the handshake response was queued, False otherwise.
    """
    print(data)
    headers = Message(StringIO(data.strip().split('\r\n', 1)[1]))
    if headers.get("Upgrade", None) != "websocket":
        return False
    logging.debug("handshaking...")
    token = b64encode(
        sha1(headers['Sec-WebSocket-Key'] + self.magic).hexdigest().decode('hex'))
    self.out_buffer = ('HTTP/1.1 101 Switching Protocols\r\n'
                       'Upgrade: websocket\r\n'
                       'Connection: Upgrade\r\n'
                       'Sec-WebSocket-Accept: %s\r\n\r\n' % token)
    return True
def handshake(self):
    """Handle the WebSocket opening handshake on self.channel."""
    data = self.channel.recv(1024).strip()
    # str.split never returns an empty list; guard kept for parity.
    if not data.split('\r\n', 1):
        return
    headers = Message(StringIO(data.split('\r\n', 1)[1]))
    if headers.get("Upgrade", None) not in ("Websocket", "websocket"):
        return
    accept = b64encode(
        sha1(headers['Sec-WebSocket-Key'] + self.magic).hexdigest().decode('hex'))
    reply = ('HTTP/1.1 101 Switching Protocols\r\n'
             'Upgrade: websocket\r\n'
             'Connection: Upgrade\r\n'
             'Sec-WebSocket-Accept: %s\r\n\r\n' % accept)
    self.handshake_done = self.channel.send(reply)
    self.valid_client = True
def mk_header(self):
    # Parse the buffered request text (self.rfile) into method, path and
    # version plus a Message of headers, then hand control to the controller.
    rfile = StringIO(self.rfile)
    requestline = rfile.readline()[:-2]  # drop trailing CRLF
    if not requestline:
        self.shutdown()
        return
    words = requestline.split()
    if len(words) == 3:
        [method, self.path, self.version] = words
        if self.version[:5] != 'HTTP/':
            self.shutdown()
            return
    elif len(words) == 2:
        # HTTP/0.9-style request line without a version token.
        [method, self.path] = words
    else:
        self.shutdown()
        return
    self.method = method.lower()
    # The remaining stream is the header block; the raw buffer is cleared.
    self.headers, self.rfile = Message(rfile, 0), ''
    # NOTE(review): binding __getitem__ on the *instance* does not affect
    # indexing on new-style classes — confirm this alias is actually used.
    self.__getitem__ = self.headers.getheader
    try:
        tasklet(controller)(self)
    except:
        print_exc(file=stderr)
def _getJson(self, page, query=None):
    # Fetch *page* and decode its JSON body, asserting the content type.
    response = self._get(page, query)
    # assumes the response's first '\n'-separated line holds the header(s)
    # (e.g. "Content-Type: ...") and the body starts at line 3 — TODO confirm
    headers = Message(StringIO(response.split('\n')[0]))
    self.assertEqual(headers['content-type'], 'application/json')
    body = ''.join(response.split('\n')[2:])
    self.assertTrue(len(body) > 0)
    return json.loads(body)
def handshake(self): data = self.sock.readTo('\r\n\r\n').strip() headers = Message(StringIO(data.split('\r\n', 1)[1])) if headers.get('Upgrade', None).lower() != 'websocket': print self.client_address, 'missing header "Upgrade: websocket"' return False self.client_ip = headers.get('X-Forwarded-For', self.client_address[0]) key = headers['Sec-WebSocket-Key'] digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex')) response = 'HTTP/1.1 101 Switching Protocols\r\n' response += 'Upgrade: websocket\r\n' response += 'Connection: Upgrade\r\n' response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest self.sock.send(response) self.on_handshake() return True
def parseData(self, data): data = data.split("|", 2) dataDict = {"from": data[0], "to": data[1]} path, headers = data[2].split('\r\n', 1) payload = Message(StringIO(headers)) url = "http://" + payload['host'] + path.split(" ")[1] if url.lower().endswith(('.png', '.ico', '.jpeg', '.jpg', '.gif', '.svg')): dataDict['image'] = url else: dataDict['url'] = url if 'cookie' in payload: dataDict['cookie'] = payload['cookie'] postData = data[2].split('\r\n\r\n') if len(postData) == 2: if postData[1].strip(): dataDict['post'] = postData[1] print dataDict; return dataDict
def resolve_uri(uri, accept="application/rdf+xml", include_info=False):
    """
    Resolve an RDF URI and return the RDF/XML.

    When include_info is true, returns a dict with the raw data, the
    urllib2 info object and a parsed Message of the response headers.

    Raises:
        ValueError: for URI schemes other than http/https.
    """
    logging.debug("resolve uri: " + uri)
    if uri.startswith("http:"):
        opener = urllib2.build_opener(urllib2.HTTPHandler)
    elif uri.startswith("https:"):
        opener = urllib2.build_opener(urllib2.HTTPSHandler)
    else:
        # Bug fix: an unsupported scheme previously fell through and raised
        # NameError on 'opener'; fail with a clear error instead.
        raise ValueError("Unsupported URI scheme: %s" % uri)
    request = urllib2.Request(uri)
    request.add_header('Accept', accept)
    request.get_method = lambda: 'GET'
    url = opener.open(request)
    try:
        data = url.read()
        #data = threads.blockingCallFromThread(reactor, url.read)
    except Exception as e:
        # Bug fix: the error used to be logged and execution continued,
        # crashing later with NameError on 'data'; re-raise after logging.
        logging.debug("Error in resolve_uri: " + str(e))
        raise
    if include_info:
        headers = Message(StringIO("".join(url.info().headers)))
        return {"data": data, "info": url.info(), "headers": headers}
    else:
        return data
def _receive_handshake(self):
    """Read the server's handshake reply; return any trailing payload bytes.

    Note: each recv overwrites the buffer, so the full header block is
    expected to arrive within a single 2048-byte read.
    """
    buf = self.sock.recv(2048)
    while "\r\n\r\n" not in buf:
        buf = self.sock.recv(2048)
    head, remainder = buf.split("\r\n\r\n", 1)
    status_line, header_text = head.split("\r\n", 1)
    parsed = Message(StringIO(header_text))
    accepted = (status_line == 'HTTP/1.1 101 Web Socket Protocol Handshake'
                and parsed.get('Connection') == 'Upgrade'
                and parsed.get('Upgrade') == 'WebSocket')
    if not accepted:
        raise WebSocketError('Invalid handshake')
    return remainder
class FTPRangeHandler(urllib2.FTPHandler):
    """ FTP Range support.. """
    def ftp_open(self, req):
        # urllib2 handler that honours a "Range: bytes=N-" request header
        # for FTP URLs by passing a REST offset to retrfile.
        host = req.get_host()
        host, port = urllib.splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise FetchError(msg)
        path, attrs = urllib.splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(urllib.unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp('', '', host, port, dirs)
            # 'I' (binary) when a file name is present, 'D' (directory) otherwise.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = urllib.splitattr(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            # Start offset comes from the Range header, if any.
            rawr = req.headers.get('Range', None)
            if rawr:
                rest = int(rawr.split("=")[1].rstrip("-"))
            else:
                rest = 0
            fp, retrlen = fw.retrfile(file, type, rest)
            fb, lb = rest, retrlen
            if retrlen is None or retrlen == 0:
                raise RangeError
            retrlen = lb - fb
            if retrlen < 0:
                # beginning of range is larger than file
                raise RangeError
            # Build a minimal header block for the addinfourl wrapper.
            headers = ''
            mtype = guess_type(req.get_full_url())[0]
            if mtype:
                headers += 'Content-Type: %s\n' % mtype
            if retrlen is not None and retrlen >= 0:
                headers += 'Content-Length: %d\n' % retrlen
            try:
                from cStringIO import StringIO
            except ImportError, msg:
                from StringIO import StringIO
            return urllib.addinfourl(fp, Message(StringIO(headers)),
                                     req.get_full_url())
def populate_headers(self):
    """Read header lines from the buffered reader into self.headers.

    Leading blank lines are skipped; parsing stops at the first empty line
    after headers begin. Each non-empty line is parsed individually and
    merged into the headers mapping.
    """
    reader = self.mr.buf_reader
    current = reader.readline().strip()
    while not current:
        current = reader.readline().strip()
    while current != "":
        self.headers.update(Message(StringIO(current)))
        current = reader.readline().strip()
def process_response_textblock(response_text_block):
    """Parse a raw HTTP response into a dict of its headers plus 'code'."""
    status_line, header_text = response_text_block.split('\r\n', 1)
    version, code = status_line.split()[0:2]
    parsed = Message(StringIO(header_text))
    result = dict(parsed.dict)
    result['code'] = code
    return result
def handshake(self):
    """Carry out the WebSocket opening handshake over self.channel."""
    raw = self.channel.recv(1024).strip()
    # str.split never yields an empty list; guard kept for parity.
    if not raw.split('\r\n', 1):
        return
    headers = Message(StringIO(raw.split('\r\n', 1)[1]))
    if headers.get("Upgrade", None) not in ("Websocket", "websocket"):
        return
    key = headers['Sec-WebSocket-Key']
    accept = b64encode(sha1(key + self.magic).hexdigest().decode('hex'))
    self.handshake_done = self.channel.send(
        'HTTP/1.1 101 Switching Protocols\r\n'
        'Upgrade: websocket\r\n'
        'Connection: Upgrade\r\n'
        'Sec-WebSocket-Accept: %s\r\n\r\n' % accept)
    self.valid_client = True
def wait_for_sip_read(self):
    """ This function waits for events and dispatches event handlers.
    This function should run inside a greenlet. """
    while True:
        # Block until the SIP socket is readable, then pull one datagram.
        fileno = self.socket.fileno()
        gsocket.wait_read(fileno)
        message = self.socket.recv(8192)
        header, body = message.split("\r\n\r\n", 1)
        sip_status_line, parse_headers = header.split("\r\n", 1)
        sip_status = sip_status_line.split(" ")
        # FIXME Debug info
        print sip_status_line
        if sip_status[0] != "SIP/2.0":
            print "WARNING: Got:", sip_status_line
            print header
            print body
            continue
        headers = Message(StringIO(parse_headers))
        # Dispatch on the SIP status code; only responses to the INVITE
        # (CSeq "20 INVITE") trigger the named handler lists.
        if sip_status[1] == '100' and headers['cseq'] == "20 INVITE":
            if 'trying' in self.handlers:
                for fn in self.handlers['trying']:
                    fn(headers, body)
        elif sip_status[1] == '180' and headers['cseq'] == "20 INVITE":
            if 'ringing' in self.handlers:
                for fn in self.handlers['ringing']:
                    fn(headers, body)
        elif sip_status[1] == '200' and headers['cseq'] == "20 INVITE":
            # Call accepted: remember the dialog identifiers for later
            # in-dialog requests ([1:-1] strips the <> around the contact).
            self.contact = headers['contact'][1:-1]
            self.to_hdr = headers['to']
            self.from_hdr = headers['from']
            if 'invite_ok' in self.handlers:
                for fn in self.handlers['invite_ok']:
                    fn(headers, body)
        elif sip_status[1] == '404' and headers['cseq'] == "20 INVITE":
            if 'not_found' in self.handlers:
                for fn in self.handlers['not_found']:
                    fn(headers, body)
        elif sip_status[1] == '415' and headers['cseq'] == "20 INVITE":
            if 'media-error' in self.handlers:
                for fn in self.handlers['media-error']:
                    fn(headers, body)
        else:
            # Anything else goes to the catch-all 'other' handlers.
            print header, body
            if 'other' in self.handlers:
                for fn in self.handlers['other']:
                    fn(headers, body)
def _receive_handshake(self):
    """Consume the server handshake; return leftover bytes after the headers."""
    while True:
        buf = self.sock.recv(2048)
        if "\r\n\r\n" in buf:
            break
    head, leftover = buf.split("\r\n\r\n", 1)
    status, raw_headers = head.split("\r\n", 1)
    hdrs = Message(StringIO(raw_headers))
    if status != 'HTTP/1.1 101 Web Socket Protocol Handshake':
        raise WebSocketError('Invalid handshake')
    if hdrs.get('Connection') != 'Upgrade' or hdrs.get('Upgrade') != 'WebSocket':
        raise WebSocketError('Invalid handshake')
    return leftover
def _receive_handshake(self):
    """Validate the handshake reply; return the data after the sentinel marker."""
    buf = self.sock.recv(2048)
    while "\r\n\r\n" not in buf:
        buf = self.sock.recv(2048)
    head, tail = buf.split("\r\n\r\n", 1)
    status_line, header_text = head.split("\r\n", 1)
    fields = Message(StringIO(header_text))
    accepted = (status_line.startswith('HTTP/1.1 101')
                and fields.get('Connection') == 'Upgrade'
                and fields.get('Upgrade') == 'WebSocket')
    if not accepted:
        raise WebSocketError('Invalid handshake')
    # Everything after the protocol-specific sentinel is payload.
    return tail.split("fQJ,fN/4F4!~K~MH")[-1]
def product_appid(prod_name, prod_dict):
    """Follow the product redirect via curl and pull appIdKey from the
    Location header's query string."""
    url = product_appid_url(prod_name, prod_dict)
    raw_response = check_output([CURL_PATH, '-is', url])
    head, body = raw_response.split('\r\n\r\n', 1)
    response_line, header_text = head.split('\r\n', 1)
    fields = dict(Message(StringIO(header_text)))
    location = fields['location']
    return parse_qs(urlparse(location).query)['appIdKey'][0]
def process_reqeust_textblock(request_text_block):
    """Parse a raw HTTP request into a dict of headers plus method and URI.

    (Function name kept for compatibility despite the 'reqeust' typo.)
    """
    request_line, header_text = request_text_block.split('\r\n', 1)
    parsed = Message(StringIO(header_text))
    method, uri, http_version = request_line.split()
    result = dict(parsed.dict)
    result['request_method'] = method
    result['request_uri'] = uri
    return result
def print_http_message(s, color):
    # Pretty-print a raw HTTP message in three colourised parts:
    # start line, headers, and (possibly decompressed) body.
    req_or_res, headers_and_body = s.split('\r\n', 1)
    fp_headers_and_body = StringIO(headers_and_body)
    m = Message(fp_headers_and_body)  # consumes the stream up to the blank line
    # cStringIO's getvalue(True) returns only up to the current position,
    # i.e. just the header block that Message has read.
    headers_part = fp_headers_and_body.getvalue(True)
    compressed, out = format_body(m, fp_headers_and_body)
    cprint(req_or_res, color, attrs=['dark'])
    cprint(headers_part, color, attrs=['bold'])
    cprint(out, color)
def _open_stream(self):
    # Generator: open a multipart HTTP stream and yield one VideoFrame per
    # part. The first part fixes the expected content-type for the rest.
    self.conn.request("GET", self.path, headers=self.request_headers)
    resp = self.conn.getresponse()
    content_type = None
    try:
        if resp.status != 200 or resp.msg.getmaintype() != 'multipart':
            raise ConnectionError(
                u"Unexpected response: {resp.status}\n"
                u"{resp.msg}\n{data}"
                .format(data=resp.read(), **locals()))
        log.debug("Opened stream\n%s", resp.msg)
        boundary = resp.msg.getparam('boundary')
        assert boundary
        fp = ReadlineAdapter(resp)
        while True:
            # Each part starts at a "--boundary" line; "--boundary--" ends
            # the stream.
            sep = fp.readline().rstrip()
            if not sep:
                # XXX: instead of this should just read two bytes
                # after the end of the data?
                sep = fp.readline().rstrip()
            if sep != b'--' + boundary:
                if sep != b'--' + boundary + b'--':
                    raise StreamingError(u"Bad boundary %r" % sep)
                break
            msg = Message(fp, seekable=0)
            content_length = int(msg['content-length'])
            # XXX: impose maximum limit on content_length?
            data = fp.read(content_length)
            # First part must be image/*; later parts must match the first.
            if content_type:
                bad_type = msg.gettype() != content_type
            else:
                bad_type = msg.getmaintype() != 'image'
            content_type = msg.gettype()
            if bad_type:
                raise StreamingError(
                    u"Unexpected content-type\n{msg}\n{data}"
                    .format(**locals()))
            log.debug("Got part\n%s", msg)
            yield VideoFrame(data, msg.gettype())
    finally:
        resp.close()
def __init__(self, text, totalSize):
    # Split a raw HTTP message into request line, headers and body,
    # recording byte sizes (totalSize is the size of the whole message).
    self.totalSize = totalSize
    head, body = text.split('\r\n\r\n', 1)
    top_line, headers_alone = head.split('\r\n', 1)
    self.methodStr = top_line
    # headers is a dict
    self.headers = Message(StringIO(headers_alone))
    self.headersSize = len(headers_alone)  # ASCII, so 1 byte per char
    self.body = body  # UTF-8 most likely...
    # NOTE(review): this subtracts only the header bytes, so the request
    # line and the separators are counted as body — confirm intent.
    self.bodySize = totalSize - self.headersSize
def assert_headers(self, header, body):
    """If a Content-Length header is present, it must equal len(body)."""
    parsed = Message(StringIO(header))
    if 'content-length' not in parsed:
        return
    declared = int(parsed['content-length'])
    actual = len(body)
    self.assertEqual(
        declared, actual,
        msg="Header reported content-length: %d Actual body length was: %d"
        % (declared, actual))
def handshake(self):
    """Read the upgrade request from stdin (fd 0) and answer on stdout (fd 1)."""
    data = ""
    # Keep reading until the headers parse and contain "Upgrade: websocket".
    for attempt in xrange(0, 20):
        data += os.read(0, 1024)
        data = data.replace("\n\n", "\r\n")
        with file('/tmp/log', 'w') as f:
            f.write(data)
        try:
            headers = Message(StringIO(data.split('\r\n', 1)[1]))
            if headers.get("Upgrade", None) == "websocket":
                break
        except:
            continue
    headers = Message(StringIO(data.split('\r\n', 1)[1]))
    # Bug fix: the original returned when Upgrade WAS "websocket", aborting
    # every valid handshake; bail out only when it is NOT a websocket upgrade.
    if headers.get("Upgrade", None) != "websocket":
        return
    key = headers['Sec-WebSocket-Key']
    digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex'))
    response = 'HTTP/1.1 101 Switching Protocols\r\n'
    response += 'Upgrade: websocket\r\n'
    response += 'Connection: Upgrade\r\n'
    response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest
    self.handshake_done = os.write(1, response)
def msg_handle(self, socket, address):
    """Dispatch an incoming connection to the module named in its headers."""
    try:
        rfile = socket.makefile('rb', self.rbufsize)
        wfile = socket.makefile('wb', self.wbufsize)
        headers = Message(rfile).dict
        INFO('get a connection from:%s,headers:%s' % (str(address), headers))
        if 'module' in headers:
            if headers['module'] in MODULES:
                MODULES[headers['module']].handle(wfile, headers)
    except Exception:
        ERROR('msg_handle exception,please check')
def decode_request(self):
    """ Parses json from POST """
    # Respond with a minimal header block first, then read the JSON body.
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    headers = Message(StringIO(self.headers))
    try:
        # Content-Length tells us how many body bytes to read.
        self.raw_data = self.rfile.read(int(headers["Content-Length"]))
        self.json_data = simplejson.loads(self.raw_data)
    except Exception, e:
        # NOTE(review): if the read itself fails, self.raw_data is unset and
        # this log line raises AttributeError — confirm and guard if needed.
        logging.warning("Cannot parse %s" % self.raw_data)
        return
def handshake(self):
    # WebSocket handshake followed by game setup: announce the new player
    # to everyone and replay the current world state to the newcomer.
    data = self.request.recv(1024).strip()
    headers = Message(StringIO(data.split('\r\n', 1)[1]))
    if headers.get("Upgrade", None) != "websocket":
        return
    print 'Handshaking...'
    key = headers['Sec-WebSocket-Key']
    digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex'))
    response = 'HTTP/1.1 101 Switching Protocols\r\n'
    response += 'Upgrade: websocket\r\n'
    response += 'Connection: Upgrade\r\n'
    response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest
    self.handshake_done = self.request.send(response)
    if self.handshake_done:
        # "m" (move) message describing this player's initial state.
        me = {}
        me["type"]="m"
        me["time"]=time.time()
        me["x"]=0
        me["y"]=0
        me["d"]=0
        me["v"]=0
        players.append(self)
        me["pid"]=players.index(self)
        memsg=jsone.encode(me)
        # Exchange state with every other connected player: tell them about
        # us, and tell us about their last known position.
        for p in players:
            if not p == self:
                other = {}
                other["type"]="m"
                other["time"]=p.lastUpdate
                other["x"]=p.lastX
                other["y"]=p.lastY
                other["d"]=p.lastDir
                other["v"]=p.lastV
                other["pid"]=players.index(p)
                self.send_message(jsone.encode(other))
                p.send_message(memsg)
        tellAboutAsteroids(self)
        tellAboutBot(self)
        print "Player connected ("+str(len(players))+")"
def do_handshake(self):
    """Answer the client's WebSocket upgrade request on self._sock."""
    raw = self._sock.recv(1024)
    _request_line, header_text = raw.split('\r\n', 1)
    fields = Message(StringIO(header_text))
    # RFC 6455 accept token: SHA-1 of key + magic GUID, base64 encoded.
    token = b64encode(sha1(fields['Sec-WebSocket-Key'] + MAGICAL_STRING).digest())
    self._sock.send(HANDSHAKE_RESPONSE_RFC6455 % token)
    return
def get(url):
    """Download *url* with multiple range-request threads and join the parts.

    Sends a HEAD request to learn Content-Length, splits the byte range
    across THREAD_COUNT DownloadThread workers writing "partfileN" files,
    then concatenates the parts into the final file and removes them.
    """
    url = parse_url(url)
    hostname = url["hostname"]
    port = url["port"] or 80
    path = url["path"]
    logging.info("[Host: %s port: %s path: %s]", hostname, port, path)
    # Establish Socket Connection
    with SocketConnection(hostname, port) as sock:
        # Send HEAD Request to fetch Headers
        sock.send("HEAD %s HTTP/1.0\r\nHost: %s\r\n\r\n" % (path, hostname))
        data = sock.recv(1024)
        recv = ""
        while len(data):
            recv = recv + data
            data = sock.recv(1024)
        # Parse Headers using MIMETools to fetch Content-Length
        request_line, headers_alone = recv.split('\r\n', 1)
        headers = Message(StringIO(headers_alone))
        content_length = headers['content-length']
        logging.info("[Content-Length: %s]", content_length)
    # Initialize download threads, one byte-range slice each.
    thread_count = THREAD_COUNT
    total = int(content_length)
    bytes_per_thread = total / thread_count
    download_threads = []
    for x in xrange(0, thread_count):
        start_range = x * bytes_per_thread
        # Bug fix: was "x is not 0" — identity comparison on an int is
        # implementation-defined; use a value comparison instead.
        if x != 0:
            start_range += 1
        end_range = (x + 1) * bytes_per_thread
        if x == (thread_count - 1):
            end_range = total - 1
        download_threads.append(
            DownloadThread(x, hostname, port, path, start_range, end_range))
    for worker in download_threads:
        worker.start()
    while threading.active_count() > 1:
        time.sleep(1)
        logging.info("Download Thread Running")
    logging.info("Joining Files")
    # Join the partfiles into the final output file.
    file_name = os.path.basename(os.path.basename(path))
    with open(file_name, 'w') as outfile:
        for x in xrange(0, thread_count):
            with open("partfile" + str(x)) as infile:
                for line in infile:
                    outfile.write(line)
    # Remove the partfiles.
    for x in xrange(0, thread_count):
        os.remove("partfile" + str(x))
    logging.info("Exiting")
def test_spamc_unix_process(self):
    """Subject must survive a round trip through spamc_unix.process()."""
    with open(self.filename) as fh:
        outcome = self.spamc_unix.process(fh)
    self.assertIn('message', outcome)
    with open(self.filename) as fh:
        before = Message(fh).get('Subject')
    after = Message(StringIO(outcome['message'])).get('Subject')
    self.assertEqual(before, after)
def handshake(self): print "handshake" data = self.request.recv(1024).strip() dSplit = data.split('\r\n', 1) if len(dSplit) > 1 : headers = Message(StringIO(data.split('\r\n', 1)[1])) else: headers = Message(StringIO(data.split('\r\n', 1))) if headers.get("Upgrade", None) == None: return if headers.get("Upgrade", None).lower() != "websocket": return try: key = headers['Sec-WebSocket-Key'] digest = b64encode(sha1(key + self.magic).hexdigest().decode('hex')) print "has key" except KeyError: self.hasSecKey = False print "no Sec-WebSocket-Key" response = 'HTTP/1.1 101 Switching Protocols\r\n' response += 'Upgrade: websocket\r\n' response += 'Connection: Upgrade\r\n' #this is also where we can distinguish a wifly from a browers if(self.hasSecKey): response += 'Sec-WebSocket-Accept: %s\r\n\r\n' % digest print "sending back handshake" self.handshake_done = self.request.send(response) if(self.hasSecKey): self.server.addBrowser(self) else: self.server.addWiFly(self)
def handshake(self):
    # Read the upgrade request and validate the authtoken passed as a
    # query-string parameter on the request URI.
    data = self.request.recv(1024).strip()
    # NOTE(review): 'headers' is parsed but unused in this snippet —
    # confirm whether later code relies on it.
    headers = Message(StringIO(data.split('\r\n', 1)[1]))
    try:
        # URI looks like /path?authtoken=...&...; pull the query params.
        uri = data.split('\r\n')[0].split(' ')[1]
        authtoken = dict(
            p.split('=') for p in uri.split('/')[-1].split('?')[1].split(
                '&'))['authtoken']
        if not wsauth.verify_token(authtoken):
            print "[!] Invalid auth token: {}".format(authtoken)
            return
        else:
            print "[+] Valid auth token: {}".format(authtoken)
    except Exception, e:
        print "Error checking token: {}".format(e)
def handshake(self):
    # Negotiate either an RFC 6455 (single-key) or Hixie-76 (two-key)
    # WebSocket handshake, then register the client with the server.
    data = self.request.recv(1024)
    headers = Message(StringIO(data.split("\r\n", 1)[1]))
    body = data.split("\r\n\r\n")[1]  # Hixie-76 sends key3 in the body
    upgrade = headers.get("Upgrade", "")
    if upgrade.lower() != "websocket":
        print "WebSockets client " + self.client_address[0] + " wrong Upgrade"
        return
    if headers.getheader("Sec-WebSocket-Key") != None:
        self.ws_rfc = True
        response = self.handshake_singleKey(headers)
    elif (headers.getheader("Sec-WebSocket-Key1") != None) and (headers.getheader("Sec-WebSocket-Key2") != None):
        self.ws_rfc = False
        response = self.handshake_twoKeys(headers, body)
    else:
        print "WebSockets client " + self.client_address[0] + " wrong structure"
        return
    # NOTE(review): sendall() returns None, so handshake_done ends up None
    # (falsy) even on success — confirm how callers read this flag.
    self.handshake_done = self.request.sendall(response)
    self.server.clients += [self]
    print "WebSockets client " + self.client_address[0] + " handshaken"
def getheaders(headers):
    """Parse a raw header block into a mapping-like object.

    On Python 3 this returns an email.message.Message built by
    http.client.parse_headers (expects *bytes*); on Python 2 it falls
    back to mimetools.Message over a StringIO.
    """
    try:
        import http.client
        import io
    except ImportError:
        # Python 2 fallback.
        from mimetools import Message
        from StringIO import StringIO
        return Message(StringIO(headers))

    class _FakeSocket(io.BytesIO):
        # parse_headers wants a socket-like object exposing makefile().
        def makefile(self, *args, **kw):
            return self

    return http.client.parse_headers(_FakeSocket(headers))
def extract_http(request_text, dict_filed):
    # Populate dict_filed['tags'] with interesting bits of a raw HTTP
    # message: method/host/URL for requests, status code for responses.
    request_line, headers_alone = request_text.split('\r\n', 1)
    headers = Message(StringIO(headers_alone))
    # print request_line
    request_line = request_line.split(' ', 2)
    # print request_line[0]
    if request_line[0] in {'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT'}:
        dict_filed['tags']['action'] = request_line[0]
        dict_filed['tags']['Domain'] = headers['host']
        dict_filed['tags']['URL'] = headers['host'] + request_line[1]
        # print request_line[1]
    else:
        # Not a request: treat the second token as a response status code.
        dict_filed['tags']['result_code'] = request_line[1]
    if 'user_agent' in headers.keys():
        dict_filed['tags']['user_agent'] = headers['user_agent']
    if 'referer' in headers.keys():
        dict_filed['tags']['referer'] = headers['referer']
    if 'content-type' in headers.keys():
        dict_filed['tags']['content-type'] = headers['content-type']
    if 'accept' in headers.keys():
        # NOTE(review): this overwrites the 'content-type' tag with the
        # Accept header — looks unintentional; confirm the intended key.
        dict_filed['tags']['content-type'] = headers['accept']
    # print headers.keys()
    # print dict_filed['URL']
def get_contacted_HTTP_Address(self):
    """Return the URLs (Host header + URI) contacted in captured HTTP requests."""
    HTTP_Request_Packets = self.applyFilters(self.capture, self.HTTP_request)
    urls = []
    for pkt in HTTP_Request_Packets:
        header_raw = pkt.load
        try:
            header_line, header_data = header_raw.split("\r\n", 1)
        except ValueError:
            # Bug fix: was "pass", which fell through and used stale (or
            # undefined) header_line/header_data; skip malformed packets.
            continue
        headers = Message(StringIO(header_data))
        # NOTE: strip() removes a *set of characters*, not a prefix; this
        # works only while the URI shares no edge characters with the tokens.
        uri = header_line.strip("HTTP/1.1").strip("GET").lstrip().rstrip()
        url = headers["host"] + uri
        urls.append(url)
    return urls
def test_spamc_tcp_process(self):
    """process() over TCP must keep the Subject header intact."""
    with open(self.filename) as src:
        payload = StringIO(src.read())
    outcome = self.spamc_tcp.process(payload)
    self.assertIn('message', outcome)
    with open(self.filename) as src:
        subject_in = Message(src).get('Subject')
    subject_out = Message(StringIO(outcome['message'])).get('Subject')
    self.assertEqual(subject_in, subject_out)
#!/usr/bin/env python from mimetools import Message from sys import argv from time import * from rfc822 import parsedate m = Message(open(argv[1])) #m_date = mktime(m.getdate('Date')) #s_date = mktime(localtime(time())) m_date = m.getdate('Date') s_date = localtime(time()) print m_date print asctime(m_date) print "--" print s_date print asctime(s_date)
def process_archive(self, peer, sender, mail_options, recips, rcptopts, data):
    """Archives email meta data using a Backend"""
    LOG(E_INFO, "%s: Sender is <%s> - Recipients (Envelope): %s" %
        (self.type, sender, ",".join(recips)))
    size = len(data)
    if size < MINSIZE:
        return self.do_exit(550, "Invalid Mail")
    if not data.endswith(NL):
        data = data + NL
    args = {}
    aid = None
    mid = None
    stream = StringIO(data)
    msg = Message(stream)
    # Null return path: bounce messages are relayed without archiving.
    if sender == "":
        LOG(E_INFO, "%s: Null return path mail, not archived" % (self.type))
        return self.sendmail("<>", mail_options, recips, rcptopts, data, aid)
    ## Check if I have msgid in my cache
    mid = msg.get("message-id", self.new_mid())
    hash = hash_headers(msg.get)
    if self.hashdb.has_key(hash):
        # Already archived: reuse the stored year/pid and just tag the mail.
        LOG(E_TRACE, "%s: Message-id: %s" % (self.type, mid))
        aid = self.hashdb[hash]
        LOG(E_TRACE, "%s: Message already has year/pid pair, only adding header" % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.add_aid(data, msg, aid), aid, hash)
    args["m_mid"] = mid
    args["hash"] = hash
    ## Check for duplicate headers
    dupe = dupe_check(msg.headers)
    if dupe is not None:
        LOG(E_ERR, "%s: Duplicate header %s" % (self.type, dupe))
        return self.do_exit(552, "Duplicate header %s" % dupe)
    ## Extraction of From field
    m_from = msg.getaddrlist("From")
    if len(m_from) == 1:
        m_from = safe_parseaddr(m_from[0][1])
    else:
        m_from = None
    ## Empty or invalid 'From' field, try to use sender
    if m_from is None:
        LOG(E_ERR, "%s: no From header in mail using sender" % self.type)
        m_from = safe_parseaddr(sender)
    ## No luck
    if m_from is None:
        return self.do_exit(552, "Mail has not suitable From/Sender")
    args["m_from"] = m_from
    ## Extract 'To' field
    m_to = []
    for h in msg.getaddrlist("To"):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Empty 'To' field use recipients
    if len(m_to) == 0:
        LOG(E_ERR, "%s: no To header in mail using recipients" % self.type)
        for recipient in recips:
            rec = safe_parseaddr(recipient)
            if rec is None:
                continue
            m_to.append(rec)
        if len(m_to) == 0:
            return self.do_exit(552, "Mail has not suitable To/Recipient")
    ## Extract 'Cc' field
    for h in msg.getaddrlist("Cc"):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Cleanup: remove duplicates
    recs = []
    for rec in m_to:
        if rec not in recs:
            recs.append(rec)
    args["m_rec"] = recs
    ## Extract 'Subject' field
    m_sub = mime_decode_header(msg.get("Subject", "No Subject"))
    # Mails whose subject matches the configured pattern are relayed
    # without archiving (the archive header is stripped first).
    if subjpattern is not None and m_sub.find(subjpattern) != -1:
        LOG(E_INFO, "%s: Subject pattern matched, not archived" % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.remove_aid(data, msg))
    args["m_sub"] = m_sub
    ## Whitelist check: From, To and Sender (envelope)
    checklist = [m_from] + m_to
    ss = safe_parseaddr(sender)
    if ss is not None:
        checklist.append(ss)
    for check in checklist:
        if check.split("@", 1)[0] in whitelist:
            LOG(E_INFO, "%s: Mail to: %s in whitelist, not archived" %
                (self.type, check))
            return self.sendmail(sender, mail_options, recips, rcptopts,
                                 self.remove_aid(data, msg))
    ## Sender size limit check - in kb
    if dbchecker is not None and dbchecker.quota_check(m_from, size >> 10):
        return self.do_exit(422, "Sender quota execeded")
    args["m_size"] = size
    ## Extract 'Date' field
    m_date = None
    if self.datefromemail:
        m_date = msg.getdate("Date")
        try:
            mktime(m_date)
        except:
            m_date = None
    if m_date is None:
        m_date = localtime(time())
    args["m_date"] = m_date
    # Collect attachment descriptions: single-part mails are parsed
    # directly, multiparts are split at the MIME boundary.
    m_attach = []
    if msg.maintype != "multipart":
        m_parse = parse_message(msg)
        if m_parse is not None:
            m_attach.append(m_parse)
    else:
        filepart = MultiFile(stream)
        filepart.push(msg.getparam("boundary"))
        try:
            while filepart.next():
                submsg = Message(filepart)
                subpart = parse_message(submsg)
                if subpart is not None:
                    m_attach.append(subpart)
        except:
            LOG(E_ERR, "%s: Error in multipart splitting" % self.type)
    args["m_attach"] = m_attach
    if dbchecker is not None:
        ## Collect data for mb lookup
        addrs = []
        for addr in [m_from] + m_to:
            addrs.append(addr)
        args["m_mboxes"] = dbchecker.mblookup(addrs)
    else:
        args["m_mboxes"] = []
    year, pid, error = self.backend.process(args)
    if year == 0:
        LOG(E_ERR, "%s: Backend Error: %s" % (self.type, error))
        return self.do_exit(pid, error)
    ## Adding X-Archiver-ID: header
    aid = "%d-%d" % (year, pid)
    data = self.add_aid(data, msg, aid)
    LOG(E_TRACE, "%s: inserting %s msg in hashdb" % (self.type, aid))
    self.hashdb[hash] = aid
    self.hashdb.sync()
    ## Next hop
    LOG(E_TRACE, "%s: backend worked fine" % self.type)
    LOG(E_TRACE, "%s: passing data to nexthop: %s:%s" %
        (self.type, self.output_address, self.output_port))
    return self.sendmail(sender, mail_options, recips, rcptopts, data, aid, hash)
def process_storage(self, peer, sender, mail_options, recips, rcptopts, data):
    """Store an already-archived email via the configured Backend.

    The mail is expected to carry an X-Archiver-ID header (AID) added by the
    archiving stage; its "year-pid" value is handed to the backend.  Mails
    without the header are treated as whitelisted and simply relayed.
    Duplicate deliveries are detected through a header-hash cache (hashdb).
    Always ends by relaying the message to the next hop via sendmail().
    """
    size = len(data)
    # Reject obviously truncated/bogus input before any parsing.
    if size < MINSIZE:
        return self.do_exit(550, "Invalid Mail")
    # Backend/storage expects a newline-terminated blob.
    if not data.endswith(NL):
        data = data + NL
    stream = StringIO(data)
    msg = Message(stream)
    aid = msg.get(AID, None)
    ## Check if I have msgid in my cache
    mid = msg.get("message-id", self.new_mid())
    LOG(E_TRACE, "%s: Message-id: %s" % (self.type, mid))
    # NOTE(review): 'hash' shadows the builtin; hash_headers is given the
    # bound msg.get accessor, so it hashes selected header values.
    hash = hash_headers(msg.get)
    if self.hashdb.has_key(hash):
        # Duplicate: reuse the cached archive id and just relay.
        aid = self.hashdb[hash]
        LOG(E_ERR, "%s: Message already processed" % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts, data, aid, hash)
    ## Date extraction
    m_date = None
    if self.datefromemail:
        m_date = msg.getdate("Date")
        try:
            # Validate the parsed tuple; mktime raises on malformed dates.
            mktime(m_date)
        except:  # NOTE(review): bare except also hides TypeError on None, intentional fallback
            m_date = None
    if m_date is None:
        # Fall back to the local receive time.
        m_date = localtime(time())
    del msg, stream
    ## Mail needs to be processed
    if aid:
        try:
            # AID format is "<year>-<pid>".
            year, pid = aid.split("-", 1)
            year = int(year)
            pid = int(pid)
        except:
            t, val, tb = exc_info()
            del tb
            LOG(E_ERR, "%s: Invalid X-Archiver-ID header [%s]" % (self.type, str(val)))
            return self.do_exit(550, "Invalid X-Archiver-ID header")
        args = dict(mail=data, year=year, pid=pid, date=m_date, mid=mid, hash=hash)
        LOG(E_TRACE, "%s: year is %d - pid is %d (%s)" % (self.type, year, pid, mid))
        # NOTE(review): 'msg' is rebound here from Message object to the
        # backend's status message string (the Message was del'd above).
        status, code, msg = self.backend.process(args)
        if status == 0:
            LOG(E_ERR, "%s: process failed %s" % (self.type, msg))
            return self.do_exit(code, msg)
        ## Inserting in hashdb
        LOG(E_TRACE, "%s: inserting %s msg in hashdb" % (self.type, aid))
        self.hashdb[hash] = aid
        self.hashdb.sync()
        LOG(E_TRACE, "%s: backend worked fine" % self.type)
    else:
        ## Mail in whitelist - not processed
        LOG(E_TRACE, "%s: X-Archiver-ID header not found in mail [whitelist]" % self.type)
    ## Next hop
    LOG(E_TRACE, "%s: passing data to nexthop: %s:%s" % (self.type, self.output_address, self.output_port))
    return self.sendmail(sender, mail_options, recips, rcptopts, data, aid, hash)
def assert_headers(self, header, body):
    """If the parsed header block declares a content-length, assert that it
    matches the actual length of *body*; otherwise do nothing."""
    parsed = Message(StringIO(header))
    if not parsed.has_key('content-length'):
        return
    declared = int(parsed['content-length'])
    actual = len(body)
    self.assertEqual(
        declared, actual,
        msg="Header reported content-length: %d Actual body length was: %d"
            % (declared, actual))
def clientThread(connection, clientID):
    """Per-connection worker: performs the WebSocket handshake, negotiates a
    collaboration id with the client, exchanges latency estimates, then relays
    chat/data messages to all other clients sharing the same collab id.

    Mutates the module-level Clients list; connection is a raw socket.
    """
    # Sending message to connected client
    # Timestamp (ms) taken before the handshake round-trip, used below for a
    # crude one-way latency estimate.
    preSendTime = int(round(time.time() * 1000))
    # Deal with WebSocket handshake here
    # RFC 6455 magic GUID appended to the client key to build the accept hash.
    magic = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
    data = connection.recv(RECV_BUFFER).strip()
    # Drop the request line, parse the remaining header block.
    headers = Message(StringIO(data.split("\r\n", 1)[1]))
    if headers.get("Upgrade", None) != "websocket":
        return
    print "Handshaking..."
    key = headers["Sec-WebSocket-Key"]
    # sha1(...).hexdigest().decode("hex") == sha1(...).digest() in Python 2.
    digest = b64encode(sha1(key + magic).hexdigest().decode("hex"))
    response = "HTTP/1.1 101 Switching Protocols\r\n"
    response += "Upgrade: websocket\r\n"
    response += "Connection: Upgrade\r\n"
    response += "Sec-WebSocket-Accept: %s\r\n\r\n" % digest
    connection.send(response)
    print "handshake done."
    # Ask the client to identify itself; the reply round-trip doubles as a
    # latency probe.
    sendMessage(connection, '{"msgtype": "id"}')
    data = receiveMessage(connection)
    receivedMessage = json.loads(data)
    postSendTime = int(round(time.time() * 1000))
    # NOTE(review): Python 2 integer division — result is truncated to ms.
    latencyEst = (postSendTime - preSendTime) / 2
    name = None
    newId = None
    global Clients
    if receivedMessage["msgtype"] == "getid":
        # Client wants a brand-new collaboration id.
        newId = generateNewCollabId()
        name = receivedMessage["name"]
        messageObject = {"msgtype": "setid", "collabid": newId}
        sendMessage(connection, json.dumps(messageObject))
    elif receivedMessage["msgtype"] == "setid":
        # Client proposes an existing id; accept it only if known.
        name = receivedMessage["name"]
        if checkIfCollabIdExists(receivedMessage["collabid"]):
            messageObject = {"msgtype": "id_ok"}
            sendMessage(connection, json.dumps(messageObject))
            newId = receivedMessage["collabid"]
        else:
            # Unknown id: assign a fresh one instead.
            messageObject = {"msgtype": "setid", "collabid": generateNewCollabId()}
            newId = messageObject["collabid"]
            sendMessage(connection, json.dumps(messageObject))
    else:
        print "Invalid message received."
        # invalid message from client.
    if not newId == None:  # NOTE(review): idiomatic form would be `newId is not None`
        # Send highest current latency to this client
        currentHighestLatency = 10
        for client in Clients:
            if client.collabID == newId and client.latencyEst > currentHighestLatency:
                currentHighestLatency = client.latencyEst
        messageObject = {"msgtype": "onward-latency-update", "latency": currentHighestLatency}
        sendMessage(connection, json.dumps(messageObject))
        # Then send the new client's latency to the other clients.
        messageObject = {"msgtype": "onward-latency-update", "latency": latencyEst}
        for client in Clients:
            if client.collabID == newId:
                sendMessage(client.connection, json.dumps(messageObject))
                # NOTE(review): messageObject is rebound inside the loop, so
                # peers after the first receive "client-joined" instead of the
                # latency update — confirm intended against original layout.
                messageObject = {"msgtype": "client-joined", "name": name}
                sendMessage(client.connection, json.dumps(messageObject))
        # Register this client so peers can relay to it; NOTE(review): Clients
        # is shared across threads with no locking.
        newClient = ConnectedClient(clientID, newId, connection, latencyEst)
        Clients.append(newClient)
        # Relay loop: forward every frame to same-collab peers until the
        # client disconnects (empty read).
        while True:
            data = receiveMessage(connection)
            if not data:
                break
            # Iterate through clients and send to ones with collabid that aren't this clientid
            for client in Clients:
                if (not client.clientID == clientID) and client.collabID == newId:
                    receivedMessage = json.loads(data)
                    print data
                    if receivedMessage["msgtype"] == "chat":
                        # Stamp chat messages with the sender's name.
                        receivedMessage["name"] = name
                        sendMessage(client.connection, json.dumps(receivedMessage))
                    else:
                        sendMessage(client.connection, data)
    connection.close()
def _websocket_client_handshake(self):
    """Coroutine state: read and answer the client's WebSocket handshake.

    Accumulates received data until the blank line terminating the HTTP
    header block (\\r\\n\\r\\n) is seen, then answers with either the old
    Hixie-76 (Sec-WebSocket-Key1/Key2) or the RFC 6455 (Sec-WebSocket-Key)
    handshake, selecting the matching frame decoder/sender, and finally
    switches the connection state to READ_WHEN.
    """
    buf = []
    while True:
        # Driven externally: each send() delivers the ready file descriptors.
        fds = yield
        if self.socket in fds:
            try:
                data = self._recv(BUF_SIZE)
            except socket.error as e:
                handle_socket_error(e)
            else:
                buf.append(data)
                # Scan everything received so far for four consecutive
                # CR/LF characters, i.e. the end of the header block.
                score = 0
                for char in itertools.chain(*buf):
                    if char in ('\r', '\n'):
                        score += 1
                        if score == 4:
                            break
                    else:
                        score = 0
                else:
                    # Terminator not seen yet: keep reading.
                    continue
                # Terminator found: leave the read loop.
                break
    # Split off the request line; parse the remaining headers.
    # NOTE(review): Python 2 only — bytes and str literals are mixed freely.
    header, body = ''.join(buf).split('\r\n\r\n', 1)
    headers = Message(StringIO(header.split(b'\r\n', 1)[1]))
    if headers.get(b'Upgrade', '').lower() != b'websocket':
        raise protocol.ConnectionClosed()
    # common headers:
    response = b''.join(
        (b'HTTP/1.1 101 Switching Protocols\r\n',
         b'Upgrade: websocket\r\n',
         b'Connection: Upgrade\r\n'))
    # if override_host:
    #     headers[b'host'] = override_host
    if b'Sec-WebSocket-Key1' in headers and \
       b'Sec-WebSocket-Key2' in headers:
        # Hixie-76 handshake: for each key, concatenate its digits, divide by
        # the number of spaces, pack both as big-endian u32, append the 8-byte
        # body, and MD5 the result.
        self._decode_message = self._decode_hixie
        accept = hashlib.md5(
            b''.join((struct.pack(b'>II', *list(
                int(b''.join(x for x in headers[key]
                             if x in string.digits), 10) /
                headers[key].count(b' ')
                for key in self.HIXIE_WEBSOCKET_SEC_KEY_ORDER)
                ), body,))).digest()
        response = b''.join(
            (response,
             b'Sec-WebSocket-Origin: %s\r\n' % headers[b'origin'],
             b'Sec-WebSocket-Location: ws://%s/\r\n' % headers[b'host'],
             b'\r\n%s' % str(accept),
             ))
        self._send(response)
        self._send = self._send_hixie
    else:
        # RFC 6455 handshake: accept = b64(sha1(key + magic GUID)).
        self._decode_message = self._decode_rfc
        secret_key = \
            b''.join((headers[b'Sec-WebSocket-Key'], self.RFC_MAGIC_HASH,))
        # hexdigest().decode('hex') is the raw digest in Python 2.
        digest = b64encode(
            hashlib.sha1(
                secret_key).hexdigest().decode(b'hex')
        )
        response = \
            b''.join((response,
                      b'Sec-WebSocket-Accept: %s\r\n\r\n' % digest,))
        self._send(response)
        self._send = self._send_rfc
    print("Ready for reading.")
    self.state = self.READ_WHEN
def expectMultipleRanges(self, range, sets, draft=0,
                         rangeParse=re.compile('bytes\s*(\d+)-(\d+)/(\d+)')):
    """Issue a GET with a multi-range Range header and verify the 206
    multipart/byteranges response: status, headers, each part's
    Content-Range and payload, and that the returned ranges equal *sets*.

    range  -- the byte-range spec (without the 'bytes=' prefix)
    sets   -- expected list of (start, end-exclusive) tuples
    draft  -- if true, also send the draft-era Request-Range header and
              expect the 'multipart/x-byteranges' content type
    NOTE(review): parameter 'range' shadows the builtin.
    """
    req = self.app.REQUEST
    rsp = req.RESPONSE
    # Add headers
    req.environ['HTTP_RANGE'] = 'bytes=%s' % range
    if draft:
        req.environ['HTTP_REQUEST_RANGE'] = 'bytes=%s' % range
    body = self.doGET(req, rsp)
    self.failUnless(rsp.getStatus() == 206,
                    'Expected a 206 status, got %s' % rsp.getStatus())
    # Multipart responses carry per-part ranges, not a top-level one.
    self.failIf(rsp.getHeader('content-range'),
                'The Content-Range header should not be set!')
    ct = string.split(rsp.getHeader('content-type'), ';')[0]
    draftprefix = draft and 'x-' or ''
    self.failIf(ct != 'multipart/%sbyteranges' % draftprefix,
                "Incorrect Content-Type set. Expected 'multipart/%sbyteranges', "
                "got %s" % (draftprefix, ct))
    if rsp.getHeader('content-length'):
        self.failIf(rsp.getHeader('content-length') != str(len(body)),
                    'Incorrect Content-Length is set! Expected %s, got %s.' % (
                        str(len(body)), rsp.getHeader('content-length')))
    # Decode the multipart message: re-wrap the body with its Content-Type so
    # mimetools/multifile can split it on the boundary.
    bodyfile = cStringIO.StringIO('Content-Type: %s\n\n%s' % (
        rsp.getHeader('content-type'), body))
    bodymessage = Message(bodyfile)
    partfiles = MultiFile(bodyfile)
    partfiles.push(bodymessage.getparam('boundary'))
    partmessages = []
    add = partmessages.append
    while partfiles.next():
        add(Message(cStringIO.StringIO(partfiles.read())))
    # Check the different parts
    returnedRanges = []
    add = returnedRanges.append
    for part in partmessages:
        range = part['content-range']
        start, end, size = rangeParse.search(range).groups()
        start, end, size = int(start), int(end), int(size)
        # Content-Range end is inclusive; convert to exclusive for slicing.
        end = end + 1
        self.failIf(size != len(self.data),
                    'Part Content-Range header reported incorrect length. '
                    'Expected %d, got %d.' % (len(self.data), size))
        part.rewindbody()
        body = part.fp.read()
        # Whotcha! Bug in MultiFile; the CRLF that is part of the boundary
        # is returned as part of the body. Note that this bug is resolved
        # in Python 2.2.
        if body[-2:] == '\r\n':
            body = body[:-2]
        self.failIf(len(body) != end - start,
                    'Part (%d, %d) is of wrong length, expected %d, got %d.' % (
                        start, end, end - start, len(body)))
        self.failIf(body != self.data[start:end],
                    'Part (%d, %d) has incorrect data. Expected %s, got %s.' % (
                        start, end, `self.data[start:end]`, `body`))
        add((start, end))
    # Compare the ranges used with the expected range sets.
    self.failIf(returnedRanges != sets,
                'Got unexpected sets, expected %s, got %s' % (
                    sets, returnedRanges))
#!/usr/bin/env python from mimetools import Message m = Message(open("bug1.eml")) print m.getaddrlist('From')
class HttpRequest:
    """A single HTTP/1.1 request/response cycle.

    Reads and parses the request line, headers, cookies, and body from *rin*
    on construction, and provides helpers to build and send the response on
    *out*.  Uses mimetools.Message for header storage.
    """
    # Server-side protocol version advertised in responses.
    http_version = (1, 1)
    http_version_string = ("HTTP/%d.%d" % http_version)
    # Hard limits guarding against oversized requests.
    max_content_length = 10000
    max_headers = 500
    # Raw request-line and its parsed components.
    request_line = None
    request_method = None
    request_uri = None
    request_path = None
    request_query = None
    request_version = None
    # Request body (a StringIO once read) and its declared length.
    content_length = 0
    content = None
    etag = None
    close_connection = True
    # Response state.
    response_code = 200
    response_status = "OK"
    response_sent = False
    cached = False
    last_modified = None
    forceSSL = False

    def __init__(self, host, rin, out):
        # host is a (addr, port) tuple; rin/out are file-like read/write ends.
        self.host = host
        self.rin = rin
        self.out = out
        self.request_args = {}
        self.args = self.request_args
        self.request_headers = {}
        self.request_cookies = {}
        self.response_headers = {}
        self.response_cookies = {}
        self.output = StringIO()
        # Parsing happens eagerly; may raise ValueError via requestError().
        self.parseRequest()

    def isSecure(self):
        return self.forceSSL

    def getRequestMethod(self):
        # NOTE(review): duplicated later in the class; the later identical
        # definition silently shadows this one.
        return self.request_method

    def trim(self, str, ends):
        # Strip the first matching suffix in *ends* from *str*.
        # NOTE(review): parameter 'str' shadows the builtin.
        for end in ends:
            if str.endswith(end):
                str = str[ : -len(end) ]
                break
        return str

    def requestError(self, code, msg=None):
        # Send an error response, then abort request processing by raising.
        self.sendError(code, msg)
        raise ValueError(self.response_status)

    def sendError(self, code, msg=None):
        self.setResponseCode(code, msg=msg)
        self.sendResponse()

    def parseRequestVersion(self, version):
        """Validate an 'HTTP/x.y' version token; 400 on malformed input.

        NOTE(review): computes request_version but never returns it, so this
        method always returns None — see the comparison in parseRequestLine.
        """
        try:
            if not version.startswith('HTTP/'):
                raise ValueError
            version_string = version.split('/', 1)[1]
            version_codes = version_string.split('.')
            if len(version_codes) != 2:
                raise ValueError
            request_version = (int(version_codes[0]), int(version_codes[1]))
        except (ValueError, IndexError):
            # NOTE(review): literal 400 where siblings use BAD_REQUEST.
            self.requestError(400, "Bad request version (%s)" % `version`)

    def parseRequestLine(self):
        """Parse 'METHOD URI [VERSION]' and derive path/query/args."""
        line = self.trim(self.request_line, ['\r\n', '\n'])
        line_fields = line.split()
        n = len(line_fields)
        if n == 3:
            [method, uri, version] = line_fields
        elif n == 2:
            # Version-less request line: treat as HTTP/0.9.
            [method, uri] = line_fields
            version = 'HTTP/0.9'
        else:
            self.requestError(BAD_REQUEST, "Bad request (%s)" % `line`)
        # NOTE(review): parseRequestVersion returns None, so request_version
        # here is always None and the > (2, 0) check can never trigger
        # (None compares less than any tuple in Python 2).
        request_version = self.parseRequestVersion(version)
        if request_version > (2, 0):
            self.requestError(VERSION_NOT_SUPPORTED,
                              "HTTP version not supported (%s)" % `version`)
        #if request_version >= (1, 1) and self.http_version >= (1, 1):
        #    self.close_connection = False
        #else:
        #    self.close_connection = True
        self.request_method = method
        self.method = method
        self.request_uri = uri
        self.request_version = version
        # Split "path?query" and decode the query string into request_args.
        uri_query = uri.split('?')
        if len(uri_query) == 1:
            self.request_path = uri
        else:
            self.request_path = uri_query[0]
            self.request_query = uri_query[1]
            self.request_args = parseQueryArgs(self.request_query)
            self.args = self.request_args

    def parseRequestHeaders(self):
        """Read header lines until the blank line, capped at max_headers."""
        header_bytes = ""
        header_count = 0
        while True:
            if header_count >= self.max_headers:
                self.requestError(BAD_REQUEST, "Bad request (too many headers)")
            line = self.rin.readline()
            header_bytes += line
            header_count += 1
            # Blank line (or EOF) terminates the header block.
            if line == '\r\n' or line == '\n' or line == '':
                break
        header_input = StringIO(header_bytes)
        self.request_headers = Message(header_input)

    def parseRequestCookies(self):
        """Split the Cookie header into the request_cookies dict."""
        cookie_hdr = self.getHeader("cookie")
        if not cookie_hdr:
            return
        for cookie in cookie_hdr.split(';'):
            try:
                cookie = cookie.lstrip()
                (k, v) = cookie.split('=', 1)
                self.request_cookies[k] = v
            except ValueError:
                # Malformed cookie fragment: skip it.
                pass

    def parseRequestArgs(self):
        """Merge POST body parameters (urlencoded or multipart) into args."""
        if ((self.content is None) or (self.request_method != "POST")):
            return
        content_type = self.getHeader('content-type')
        if not content_type:
            return
        (encoding, params) = cgi.parse_header(content_type)
        if encoding == URLENCODED:
            xargs = cgi.parse_qs(self.content.getvalue(),
                                 keep_blank_values=True)
        elif encoding == MULTIPART_FORM_DATA:
            xargs = cgi.parse_multipart(self.content, params)
        else:
            xargs = {}
        self.request_args.update(xargs)

    def getCookie(self, k):
        # Raises KeyError when the cookie is absent.
        return self.request_cookies[k]

    def readContent(self):
        """Read the request body into a StringIO, honoring Content-Length."""
        try:
            self.content_length = int(self.getHeader("Content-Length"))
        except:
            # No/invalid Content-Length: no body to read.
            return
        if self.content_length > self.max_content_length:
            self.requestError(REQUEST_ENTITY_TOO_LARGE)
        self.content = self.rin.read(self.content_length)
        self.content = StringIO(self.content)
        self.content.seek(0,0)

    def parseRequest(self):
        """Full request parse pipeline, in protocol order."""
        self.request_line = self.rin.readline()
        self.parseRequestLine()
        self.parseRequestHeaders()
        self.parseRequestCookies()
        connection_mode = self.getHeader('Connection')
        self.setCloseConnection(connection_mode)
        self.readContent()
        self.parseRequestArgs()

    def setCloseConnection(self, mode):
        # Interpret a Connection: header value; anything else keeps defaults.
        if not mode:
            return
        mode = mode.lower()
        if mode == 'close':
            self.close_connection = True
        elif (mode == 'keep-alive') and (self.http_version >= (1, 1)):
            self.close_connection = False

    def getCloseConnection(self):
        return self.close_connection

    def getHeader(self, k, v=None):
        # Case-insensitive lookup via mimetools.Message.get.
        return self.request_headers.get(k, v)

    def getRequestMethod(self):
        return self.request_method

    def getRequestPath(self):
        return self.request_path

    def setResponseCode(self, code, status=None, msg=None):
        # NOTE(review): 'msg' is accepted (and passed by sendError) but unused.
        self.response_code = code
        if not status:
            status = getStatus(code)
        self.response_status = status

    def setResponseHeader(self, k, v):
        # Header names are normalized to lowercase for storage.
        k = k.lower()
        self.response_headers[k] = v
        if k == 'connection':
            self.setCloseConnection(v)
    # Alias kept for callers using the shorter name.
    setHeader = setResponseHeader

    def setLastModified(self, when):
        # time.time() may be a float, but the HTTP-date strings are
        # only good for whole seconds.
        when = long(math.ceil(when))
        if (not self.last_modified) or (self.last_modified < when):
            # NOTE(review): assigns 'lastModified', not 'last_modified' —
            # looks like a typo; the guard above reads last_modified.
            self.lastModified = when
        modified_since = self.getHeader('if-modified-since')
        if modified_since:
            modified_since = stringToDatetime(modified_since)
            if modified_since >= when:
                # Client copy is fresh: answer 304 and skip the body.
                self.setResponseCode(NOT_MODIFIED)
                self.cached = True

    def setContentType(self, ty):
        self.setResponseHeader("Content-Type", ty)

    def setEtag(self, etag):
        """Record the response ETag and honor If-None-Match preconditions."""
        if etag:
            self.etag = etag
        tags = self.getHeader("if-none-match")
        if tags:
            tags = tags.split()
            if (etag in tags) or ('*' in tags):
                # Match: 304 for safe methods, 412 otherwise.
                if self.request_method in ("HEAD", "GET"):
                    code = NOT_MODIFIED
                else:
                    code = PRECONDITION_FAILED
                self.setResponseCode(code)
                self.cached = True

    def addCookie(self, k, v, expires=None, domain=None, path=None,
                  max_age=None, comment=None, secure=None):
        """Queue a Set-Cookie header with the given optional attributes."""
        cookie = v
        if expires != None:
            cookie += "; Expires=%s" % expires
        if domain != None:
            cookie += "; Domain=%s" % domain
        if path != None:
            cookie += "; Path=%s" % path
        if max_age != None:
            cookie += "; Max-Age=%s" % max_age
        if comment != None:
            cookie += "; Comment=%s" % comment
        if secure:
            cookie += "; Secure"
        self.response_cookies[k] = cookie

    def sendResponseHeaders(self):
        """Write all response headers and cookies, then the blank line."""
        if self.etag:
            self.setResponseHeader("ETag", self.etag)
        for (k, v) in self.response_headers.items():
            self.send("%s: %s\r\n" % (k.capitalize(), v))
        for (k, v) in self.response_cookies.items():
            self.send("Set-Cookie: %s=%s\r\n" % (k, v))
        self.send("\r\n")

    def sendResponse(self):
        """Send status line, headers, and (if any) the buffered body once."""
        if self.response_sent:
            return
        self.response_sent = True
        send_body = self.hasBody()
        if not self.close_connection:
            self.setResponseHeader("Connection", "keep-alive")
        self.setResponseHeader("Pragma", "no-cache")
        self.setResponseHeader("Cache-Control", "no-cache")
        self.setResponseHeader("Expires", "-1")
        if send_body:
            self.output.seek(0, 0)
            body = self.output.getvalue()
            body_length = len(body)
            self.setResponseHeader("Content-Length", body_length)
        # HTTP/0.9 responses carry neither status line nor headers.
        if self.http_version > (0, 9):
            self.send("%s %d %s\r\n" % (self.http_version_string,
                                        self.response_code,
                                        self.response_status))
            self.sendResponseHeaders()
        if send_body:
            self.send(body)
        self.flush()

    def write(self, data):
        # Buffer body output; flushed to the wire by sendResponse().
        self.output.write(data)

    def send(self, data):
        #print 'send>', data
        self.out.write(data)

    def flush(self):
        self.out.flush()

    def hasNoBody(self):
        # HEAD requests, 1xx/204/304-style codes, and cached (304/412) hits
        # must not carry a body.
        return ((self.request_method == "HEAD") or
                (self.response_code in NO_BODY_CODES) or
                self.cached)

    def hasBody(self):
        return not self.hasNoBody()

    def process(self):
        # Default no-op handler; subclasses are expected to override.
        pass
        return self.close_connection

    def getRequestHostname(self):
        """Get the hostname that the user passed in to the request.

        Uses the 'Host:' header if it is available, and the
        host we are listening on otherwise.
        """
        return (self.getHeader('host') or
                socket.gethostbyaddr(self.getHostAddr())[0]
                ).split(':')[0]

    def getHost(self):
        return self.host

    def getHostAddr(self):
        return self.host[0]

    def getPort(self):
        return self.host[1]

    def setHost(self, host, port, ssl=0):
        """Change the host and port the request thinks it's using.

        This method is useful for working with reverse HTTP proxies (e.g.
        both Squid and Apache's mod_proxy can do this), when the address
        the HTTP client is using is different than the one we're listening on.

        For example, Apache may be listening on https://www.example.com, and then
        forwarding requests to http://localhost:8080, but we don't want HTML produced
        to say 'http://localhost:8080', they should say 'https://www.example.com',
        so we do::

           request.setHost('www.example.com', 443, ssl=1)
        """
        self.forceSSL = ssl
        # NOTE(review): 'received_headers' is never initialized anywhere in
        # this class (__init__ creates 'request_headers') — this line raises
        # AttributeError as written.
        self.received_headers["host"] = host
        self.host = (host, port)
def _extractContentType(self, doc, docDictionary):
    """Parse the document's stored HTTP header block (DOCHDR) and return
    its MIME content type."""
    raw_header = docDictionary["DOCHDR"]
    parsed = Message(StringIO(raw_header))
    return parsed.gettype()