def __init__(self, data):
    """Fake HTTP response wrapping *data* as the body.

    Presents a canned 200 OK whose message advertises a
    Content-Type of application/x-compressed.  The message object
    is built differently per interpreter because the HTTPMessage
    constructor changed between Python 2 and Python 3.
    """
    BytesIO.__init__(self, data)
    self.version = 'HTTP/1.1'
    self.reason = 'OK'
    self.status = 200
    if not PY2:
        # Python 3: start from an empty message and attach the header.
        msg = HTTPMessage()
        msg.add_header('Content-Type', 'application/x-compressed')
    else:
        # Python 2: HTTPMessage parses headers out of a file object.
        msg = HTTPMessage(BytesIO(b'Content-Type: application/x-compressed\r\n'))
    self.msg = msg
def __init__(self, content):
    """Build a response-info object for *content*.

    On Python 2 the headers are parsed directly from the content;
    otherwise an empty message is created and the content type is
    guessed from the test data itself.
    """
    # NOTE(review): 'PY2x' looks like a typo for 'PY2' (sibling code in
    # this file tests 'PY2') -- confirm before relying on this branch.
    if PY2x:
        fp = StringIO(content)
        self._info = HTTPMessage(fp)
    else:
        self._info = HTTPMessage()
        # Adjust to testdata.
        la = content.split(':')
        if len(la) > 1:
            # Get the type by just
            # using the data at the end.
            t = la[-1].strip()
            self._info.set_type(t)
def update(self, lock):
    """ See if a http or rss page changed. """
    self.error = False
    # Mark the watch busy in the UI while the check runs.
    self.specto.mark_watch_busy(True, self.id)
    self.specto.logger.log(
        _("Updating watch: \"%s\"") % self.name, "info", self.__class__)
    # Create a unique name for each url.
    digest = md5.new(self.url_).digest()
    cacheFileName = "".join(["%02x" % (ord(c), ) for c in digest])
    self.cacheFullPath_ = os.path.join(cacheSubDir__, cacheFileName)
    # Ask for gzip so large pages transfer quickly.
    request = urllib2.Request(self.url_, None, {"Accept-encoding": "gzip"})
    if (self.cached == 1) or (os.path.exists(self.cacheFullPath_)):
        self.cached = 1
        # NOTE(review): file handle 'f' is never closed -- leak; also
        # 'file()' and 'md5.new' are Python-2-only spellings.
        f = file(self.cacheFullPath_, "r")
        # Load up the cached version
        self.infoB_ = HTTPMessage(f)
        # Use conditional-GET headers so an unchanged page costs nothing.
        if self.infoB_.has_key('last-modified'):
            request.add_header("If-Modified-Since",
                               self.infoB_['last-modified'])
        if self.infoB_.has_key('ETag'):
            request.add_header("If-None-Match", self.infoB_['ETag'])
    try:
        response = urllib2.urlopen(request)
    except (urllib2.URLError, BadStatusLine), e:
        # Network failure: flag the watch and log, do not raise.
        self.error = True
        self.specto.logger.log(
            _("Watch: \"%s\" has error: ") % self.name + str(e),
            "error", self.__class__)
def datagramReceived(self, data, addr, outip):
    """Handle one SSDP datagram: answer M-SEARCH requests.

    Ignores packets arriving on interfaces we are not bound to,
    parses the request line and headers, and schedules a delayed
    M-SEARCH response per registered device (spread over the MX
    window as SSDP requires).
    """
    if outip not in self.interfaces:
        if self.INADDR_ANY not in self.interfaces:
            return
    req_line, data = data.split('\r\n', 1)
    # BUG FIX: a request line has exactly three fields, so maxsplit
    # must be 2 (was 3).  With maxsplit=3 a malformed line carrying a
    # fourth token made the 3-name unpack raise ValueError; with
    # maxsplit=2 any trailing junk stays in `version` and the request
    # is simply rejected below.
    method, path, version = req_line.split(None, 2)
    # check method
    if method != 'M-SEARCH' or path != '*':
        return
    # parse header
    headers = HTTPMessage(StringIO(data))
    mx = int(headers.getheader('MX'))
    # send M-SEARCH response, each packet delayed randomly within MX
    for udn in self.devices:
        device = self.devices[udn]
        delay = random() * mx
        for packet in device.make_msearch_response(headers,
                                                   (outip, self.port),
                                                   addr):
            buff = build_packet('HTTP/1.1 200 OK', packet)
            self.reactor.callLater(delay, self._send_packet,
                                   self.ssdp, buff, addr)
            delay += self.SSDP_INTERVAL
def __init__(self, replay_response, method=None):
    """Rebuild an httplib-style response from a recorded replay dict."""
    status = replay_response['status']
    self.reason = status['message']
    self.status = status['code']
    self.version = None
    if 'body_text' in replay_response:
        # The JSON decoder hands back unicode; re-encode so the body
        # matches the bytes that were on the wire.
        self._content = replay_response['body_text'].encode('utf8')
    elif 'body_quoted_printable' in replay_response:
        # quopri.decodestring already yields str, i.e. raw wire bytes.
        self._content = quopri.decodestring(
            replay_response['body_quoted_printable'])
    else:
        # str.decode('base64') likewise yields raw wire bytes.
        self._content = replay_response['body'].decode('base64')
    self.fp = StringIO(self._content)
    header_lines = ('{}: {}'.format(h, v)
                    for h, v in replay_response['headers'].iteritems())
    self.msg = HTTPMessage(StringIO('\r\n'.join(header_lines)))
    self.msg.fp = None  # httplib does this, okay?
    length = self.msg.getheader('content-length')
    self.length = int(length) if length else None
    # Keep the method around so HEAD can be special-cased as httplib does.
    self._method = method
def parse_headers(header_list):
    """Turn a recorded header list (or legacy dict) into an HTTPMessage."""
    if isinstance(header_list, dict):
        # Old cassette format stored headers as a dict.
        return parse_headers_backwards_compat(header_list)
    raw = "".join(header_list) + "\r\n"
    msg = HTTPMessage(StringIO(raw))
    msg.fp.seek(0)
    msg.readheaders()
    return msg
def parse_headers_backwards_compat(header_dict):
    """Parse pre-0.6.0 dict-style cassette headers.

    vcr 0.6.0 switched cassettes from storing headers as a dict to a
    list; this converts the old dict form into an HTTPMessage for
    backwards-compatibility.
    """
    msg = HTTPMessage(StringIO(""))
    for name, value in header_dict.iteritems():
        # addheader() fills msg.dict; the raw-lines list must be kept
        # in sync by hand.
        msg.addheader(name, value)
        msg.headers.append("{0}:{1}".format(name, value))
    return msg
def __init__(self, conn):
    """Canned 200 OK response bound to *conn*'s socket settings."""
    HPPResponse.__init__(self, sock=conn.sock,
                         debuglevel=conn.debuglevel,
                         strict=conn.strict,
                         method=conn._method)
    self.will_close = False
    self.chunked = False
    self.length = 0
    self.reason = 'OK'
    self.status = 200
    # Empty, non-seekable header block; detach its fp like httplib does.
    self.msg = HTTPMessage(StringIO(), seekable=0)
    self.msg.fp = None
def inner(*args, **kwargs):
    """Run *fn* with sanction.urlopen patched to return a canned response.

    Encodes the closed-over `data` and `headers` into bytes and wires
    them into a fake addinfourl with the closed-over `code`.
    """
    with patch('sanction.urlopen') as mock_urlopen:
        # BUG FIX: `type(data) is basestring` is always False -- no
        # object's concrete type is the abstract basestring -- so str
        # payloads were never encoded.  isinstance() is the correct
        # check, and the conditional expression avoids the and/or
        # idiom, which fell through to the unencoded value whenever
        # data encoded to b''.
        bdata = data.encode() if isinstance(data, basestring) else data
        sheaders = ''
        if headers is not None:
            sheaders = '\r\n'.join(
                ['{}: {}'.format(k, v) for k, v in headers.items()])
        bheaders = sheaders.encode()
        mock_urlopen.return_value = addinfourl(
            BytesIO(bdata), HTTPMessage(BytesIO(bheaders)), '', code=code)
        fn(*args, **kwargs)
def expect_response(self):
    """Read one status line from the socket, handling 100 Continue."""
    # Drop any previous buffered reader before attaching a fresh one.
    if self.fp:
        self.fp.close()
        self.fp = None
    self.fp = self.sock.makefile('rb', 0)
    version, status, reason = self._read_status()
    if status != CONTINUE:
        # Not an interim response: stash the already-consumed status
        # line so begin() "re-reads" it without touching the socket.
        self._read_status = lambda: (version, status, reason)
        self.begin()
    else:
        # 100 Continue: record it and parse the headers that follow
        # (seekable=0: read from the current stream position).
        self.status = status
        self.reason = reason.strip()
        self.version = 11
        self.msg = HTTPMessage(self.fp, 0)
        self.msg.fp = None
def find_user_password(self, realm, authuri):
    """Look up credentials, capped at five attempts per request.

    ``self.retries`` must be reset by the calling code between
    requests; once it exceeds 5 a synthetic 401 HTTPError is raised
    instead of prompting again.
    """
    # allow sending the username:password 5 times before failing!
    if self.retries > 5:
        from httplib import HTTPMessage
        from StringIO import StringIO
        raise urllib2.HTTPError(
            authuri, 401,
            "basic auth failed for realm %r" % realm,
            HTTPMessage(StringIO("")), None)
    self.retries += 1
    return urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
        self, realm, authuri)
def from_dict(cls, data):
    """Reconstruct an instance from its dict representation."""
    obj = cls()
    for attr in cls.attrs:
        if attr in data:
            setattr(obj, attr, data[attr])
    obj.fp = cls.create_file_descriptor(obj.content)
    # Rebuild the message from an empty stream, then fill it by hand.
    obj.msg = HTTPMessage(io.StringIO(unicode()), 0)
    for name, value in obj.headers.iteritems():
        obj.msg.addheader(name, value)
    obj.msg.headers = data["raw_headers"]
    return obj
def __call__(self, opener, method, url, data, headers):
    """Dispatch a fake request: raise, echo cookies, replay, or 404."""
    if url in self._exceptions:
        raise self._exceptions[url]
    if url in self._cookie_responses:
        # Echo the request's Cookie header back as the body, with the
        # configured response headers attached.
        plain = self._cookie_responses[url]
        lines = [name + ': ' + plain[name] for name in plain]
        response_headers = HTTPMessage(StringIO('\n'.join(lines)))
        return self.build_response(url, 200, headers['Cookie'], 'OK',
                                   response_headers)
    if url in self._responses:
        return self.build_response(*self._responses[url])
    return self.build_response(url, 404, '', 'Not Found')
def __init__(self, recorded_response):
    """Rehydrate an httplib-style response from a recorded cassette entry."""
    self.recorded_response = recorded_response
    status = recorded_response['status']
    self.reason = status['message']
    self.status = status['code']
    self.version = None
    self._content = StringIO(self.recorded_response['body']['string'])
    # Headers were parsed when the cassette was recorded, so hand the
    # message an empty stream and fill its containers by hand.
    self.msg = HTTPMessage(StringIO(''))
    for key, val in self.recorded_response['headers'].iteritems():
        # addheader() populates msg.dict only; the msg.headers list
        # representation has to be appended separately.
        self.msg.addheader(key, val)
        self.msg.headers.append("{0}:{1}".format(key, val))
    self.length = self.msg.getheader('content-length') or None
def test_from_httplib(self):
    """HTTPHeaderDict.from_httplib keeps duplicate Set-Cookie headers."""
    # httplib only exists on Python 2; skip elsewhere.
    if six.PY3:
        raise SkipTest()
    from httplib import HTTPMessage
    from StringIO import StringIO
    msg = """
Server: nginx
Content-Type: text/html; charset=windows-1251
Connection: keep-alive
Set-Cookie: bb_lastvisit=1348253375; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/
Set-Cookie: bb_lastactivity=0; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/
"""
    # Strip the leading newline and convert to wire-style CRLF endings.
    msg = HTTPMessage(StringIO(msg.lstrip().replace('\n', '\r\n')))
    d = HTTPHeaderDict.from_httplib(msg)
    self.assertEqual(d['server'], 'nginx')
    # Both Set-Cookie values must survive the conversion.
    cookies = d.getlist('set-cookie')
    self.assertEqual(len(cookies), 2)
    self.assertTrue(cookies[0].startswith("bb_lastvisit"))
    self.assertTrue(cookies[1].startswith("bb_lastactivity"))
def from_dict(cls, data):
    """Reconstruct an instance from its dict representation.

    Serializations written by older versions lack the ``length`` and
    ``version`` attributes, so sensible defaults are filled in first
    for backwards compatibility.
    """
    data.setdefault('length', len(data['content']))
    data.setdefault('version', 10)
    obj = cls()
    for attr in cls.attrs:
        setattr(obj, attr, data[attr])
    obj.fp = cls.create_file_descriptor(obj.content)
    # Rebuild the message from an empty stream, then fill it by hand.
    obj.msg = HTTPMessage(io.StringIO(unicode()), 0)
    for name, value in obj.headers.iteritems():
        obj.msg.addheader(name, value)
    obj.msg.headers = data["raw_headers"]
    return obj
def readbodychunked(self, ifile, ofile, dechunk=False, deflength=4096):
    """Copy a chunked HTTP body from *ifile* to *ofile*.

    Args:
        ifile: source file object positioned at the first chunk-size line.
        ofile: destination file object.
        dechunk: when True, strip the chunk framing (size lines, CRLFs
            and trailers) and return the parsed trailer headers.
        deflength: maximum number of bytes per read call.

    Returns:
        ``(totaldata, footers)`` -- the number of body bytes copied, and
        an HTTPMessage of the trailers when *dechunk* is True, else None.

    Raises:
        ValueError: on a malformed (non-hex) chunk-size line.
    """
    totaldata = 0
    while True:
        # Chunk length (discard trailing ';....' extension)
        clength = ifile.readline()
        if not dechunk:
            ofile.write(clength)
        clength = clength.rstrip('\r\n').split(';', 1)[0]
        clength = int(clength, 16)
        if not clength:
            # A zero-size chunk terminates the body.
            break
        # Chunk data, copied in bounded reads.
        while clength:
            data = ifile.read(min(clength, deflength))
            if not data:
                # BUG FIX: EOF in mid-chunk used to spin forever here,
                # since clength never reached zero on empty reads.
                break
            ldata = len(data)
            clength -= ldata
            totaldata += ldata
            ofile.write(data)
        # Chunk end - trailing newline
        chunkend = ifile.readline()
        if not dechunk:
            ofile.write(chunkend)
    ofile.flush()
    # Chunk trailers
    if dechunk:
        ofile = StringIO()
        while True:
            line = ifile.readline()
            ofile.write(line)
            # BUG FIX: also stop on EOF ('') so a truncated trailer
            # section cannot loop forever.
            if not line or line == '\r\n' or line == '\r':
                break
        ofile.flush()
        # BUG FIX: rewind before parsing -- with seekable=0 HTTPMessage
        # reads from the current position, which was end-of-buffer.
        ofile.seek(0)
    footers = None if not dechunk else HTTPMessage(ofile, seekable=0)
    return (totaldata, footers)
def __init__(self, replay_response, method=None):
    """Rebuild an httplib-style response from a recorded replay dict."""
    status = replay_response['status']
    self.reason = status['message']
    self.status = status['code']
    self.version = None
    if 'body_quoted_printable' in replay_response:
        # quopri.decodestring yields str, i.e. raw wire bytes.
        self._content = quopri.decodestring(
            replay_response['body_quoted_printable'])
    else:
        # str.decode('base64') likewise yields raw wire bytes.
        self._content = replay_response['body'].decode('base64')
    self.fp = StringIO(self._content)
    header_lines = ('{}: {}'.format(h, v)
                    for h, v in replay_response['headers'].iteritems())
    self.msg = HTTPMessage(StringIO('\r\n'.join(header_lines)))
    self.msg.fp = None  # httplib does this, okay?
    length = self.msg.getheader('content-length')
    self.length = int(length) if length else None
    # Keep the method around so HEAD can be special-cased as httplib does.
    self._method = method
def response(self, url, bypass_content):
    """Return a canned addinfourl for *url*, or None.

    *bypass_content* is a JSON mapping of ``{url: [content, headers]}``.
    Returns None when the url is not covered or anything goes wrong.
    """
    try:
        mapping = json.loads(bypass_content)
        if url not in mapping:
            youtubedl_logger.info('Request Skipped for url -> ' + url)
            return None
        content, raw_headers = mapping[url]
        fp = io.BytesIO(content.encode('utf-8'))
        headers = HTTPMessage(io.StringIO(unicode(raw_headers)), 0)
        res = addinfourl(fp, headers, url, 200)
        youtubedl_logger.info('Request Bypassed for url -> '+ url)
        return res
    except Exception:
        youtubedl_logger.info('Request Exception for url -> ' + url)
        traceback.print_exc()
        return None
#!/usr/bin/env python import sys import logging logging.getLogger("scapy.runtime").setLevel(logging.ERROR) from scapy.all import * from httplib import HTTPMessage import StringIO import gzip import re if len(sys.argv) > 1: packets = rdpcap(sys.argv[1]) for packet in packets: if packet[TCP].sport == 1337: data = packet[TCP].payload if len(data) > 0: f = StringIO.StringIO(bytes(data)) status_line = f.readline() msg = HTTPMessage(f, 0) body = msg.fp.read() body_stream = StringIO.StringIO(body[4:-7]) gzipper = gzip.GzipFile(fileobj=body_stream) data = gzipper.read() m = re.search(r'STL{.*?}', data) if m: print m.group(0)
def __init__(self):
    """Canned empty 200 OK response."""
    self.msg = HTTPMessage(StringIO())
    self.reason = 'OK'
    # default status is 200
    self.status = 200
def parse_flow(IP):
    """Run tcpflow over the configured pcap and append flows for *IP* to `out`.

    HTTP responses are un-gzipped (chunked or plain); everything else
    is written with non-printable bytes replaced by '.'.
    """
    p = sub.Popen([
        'tcpflow', '-T %T--%A-%B', '-cJB', '-r',
        (os.getenv('PROCDOTPLUGIN_WindumpFilePcap'))
    ], stdout=sub.PIPE, stderr=sub.PIPE)
    stdout, stderr = p.communicate()
    stdout = stdout.replace('\r\n', '\n')
    if IP not in stdout:
        e = str("No tcp flows found for ") + IP
        open(out, 'ab').write(e)
    else:
        # tcpflow colourizes flow directions with ANSI escapes; capture
        # the text between the colour-on and colour-off sequences.
        m = re.findall('\x1b\[0;3[1|4]m(.*?)\x1b\[0m', stdout, re.DOTALL)
        m = iter(m)
        for line in m:
            if IP in line:
                # The first 56 characters are the tcpflow flow banner.
                line = line[56:]
                match = re.match('^HTTP.*', line)
                try:
                    if match:
                        # Loop until a complete gzip body has been
                        # assembled (continuation flows get appended).
                        length = 1
                        num = 0
                        while length != num:
                            d = zlib.decompressobj(16 + zlib.MAX_WBITS)
                            output = StringIO.StringIO(line)
                            status_line = output.readline()
                            msg = HTTPMessage(output, 0)
                            isGZipped = msg.get('content-encoding', '').find('gzip') >= 0
                            isChunked = msg.get('Transfer-Encoding', '').find('chunked') >= 0
                            if isGZipped and isChunked:
                                # First chunk-size line tells how many
                                # bytes the gzip payload should contain.
                                offset = msg.fp.readline()
                                body = msg.fp.read()
                                num = int(offset, 16)
                                encdata = ''
                                newdata = ''
                                encdata = body[:num]
                                length = len(encdata)
                                if length != num:
                                    # Incomplete: splice in the next flow
                                    # fragment and re-parse.
                                    line = line + next(m)[56:]
                                else:
                                    newdata = d.decompress(encdata)
                                    header = str(msg)
                                    open(out, 'ab').write(status_line)
                                    open(out, 'ab').write(header)
                                    open(out, 'ab').write('\n')
                                    open(out, 'ab').write(newdata)
                            elif isGZipped:
                                length = 1
                                num = 1
                                body = msg.fp.read()
                                data = d.decompress(body)
                                header = str(msg)
                                open(out, 'ab').write(status_line)
                                open(out, 'ab').write(header)
                                open(out, 'ab').write('\n')
                                open(out, 'ab').write(data)
                            else:
                                # Plain response: sanitize non-printables.
                                length = 1
                                num = 1
                                body = msg.fp.read()
                                body = re.sub(
                                    '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]',
                                    '.', body)
                                header = str(msg)
                                open(out, 'ab').write(status_line)
                                open(out, 'ab').write(header)
                                open(out, 'ab').write('\n')
                                open(out, 'ab').write(body)
                    else:
                        # Not an HTTP response: sanitize and dump as-is.
                        line = re.sub(
                            '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]',
                            '.', line)
                        open(out, 'ab').write(line)
                except:
                    # Best-effort recovery: note the failure and keep
                    # the raw flow text.
                    open(out, 'ab').write('DECOMPRESSION ERROR')
                    open(out, 'ab').write('\n\n')
                    open(out, 'ab').write(line)
def clone(self):
    """Return an independent copy of this message's headers."""
    duplicate = HTTPMessage(StringIO(), seekable=0)
    duplicate.headers = copy(self.headers)
    duplicate.dict = self.dict.copy()
    return duplicate
def parse_flow(IP):
    """Run tcpflow over the configured pcap and append flows for *IP* to `out`.

    Like the sibling parse_flow, but distinguishes the two flow
    directions (red/blue ANSI colours) and wraps one direction in a
    randomly-named style tag when the plugin engine supports styling.
    """
    # Random style id so the engine-side markup cannot collide.
    styleID = ''.join(choice(string.ascii_lowercase + string.digits)
                      for x in range(randint(8, 12)))
    ssize = len(styleID)
    p = sub.Popen(['tcpflow', '-T %T--%A-%B', '-cJB', '-r',
                   (os.getenv('PROCDOTPLUGIN_WindumpFilePcap'))],
                  stdout=sub.PIPE, stderr=sub.PIPE)
    stdout, stderr = p.communicate()
    stdout = stdout.replace('\r\n', '\n')
    if IP not in stdout:
        e = str("No tcp flows found for ") + IP
        open(out, 'ab').write(e)
    else:
        if os.getenv('PROCDOTPLUGIN_PluginEngineVersion') is not None:
            open(out, 'ab').write('{{{style-id:default;color:blue;style-id:' + styleID + ';color:red}}}')
        # Capture both directions: red (0;31) and blue (0;34) spans.
        m = re.findall('\x1b\[0;31m(.*?)\x1b\[0m|\x1b\[0;34m(.*?)\x1b\[0m', stdout, re.DOTALL)
        m = iter(m)
        for b, r in m:
            if b == '':
                # Blue direction: sanitize and write, styled if supported.
                if IP in r:
                    r = r[56:]
                    r = re.sub(
                        '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]', '.', r)
                    # Only prepend a blank separator once the file has
                    # grown past the style header written above.
                    if os.stat(out).st_size <= 53 + ssize:
                        if os.getenv('PROCDOTPLUGIN_PluginEngineVersion') is not None:
                            open(out, 'ab').write('<' + styleID + '>' + r + '</' + styleID + '>')
                        else:
                            open(out, 'ab').write(r)
                    else:
                        if os.getenv('PROCDOTPLUGIN_PluginEngineVersion') is not None:
                            open(out, 'ab').write('\n\n' + '<' + styleID + '>' + r + '</' + styleID + '>')
                        else:
                            open(out, 'ab').write('\n\n' + r)
            else:
                # Red direction: HTTP responses get de-gzipped.
                if IP in b:
                    b = b[56:]
                    match = re.match('^HTTP.*', b)
                    try:
                        if match:
                            length = 1
                            num = 0
                            while length != num:
                                d = zlib.decompressobj(16 + zlib.MAX_WBITS)
                                output = StringIO.StringIO(b)
                                status_line = output.readline()
                                msg = HTTPMessage(output, 0)
                                isLength = msg.get('Content-Length')
                                isGZipped = msg.get('content-encoding', '').find('gzip') >= 0
                                isChunked = msg.get('Transfer-Encoding', '').find('chunked') >= 0
                                if isGZipped and isChunked:
                                    offset = msg.fp.readline()
                                    body = msg.fp.read()
                                    num = int(offset, 16)
                                    encdata = ''
                                    newdata = ''
                                    encdata = body[:num]
                                    length = len(encdata)
                                    if length != num:
                                        # NOTE(review): `d, e = c` clobbers
                                        # the zlib decompressobj bound to
                                        # `d` above -- later d.decompress
                                        # calls would hit a string.  Real
                                        # bug; rename the unpack targets.
                                        c = next(m)
                                        d, e = c
                                        b = b + d[56:]
                                    else:
                                        newdata = d.decompress(encdata)
                                        header = str(msg)
                                        open(out, 'ab').write(status_line)
                                        open(out, 'ab').write(header)
                                        open(out, 'ab').write('\n')
                                        open(out, 'ab').write(newdata)
                                elif isGZipped:
                                    # Non-chunked: Content-Length tells us
                                    # whether the body is complete.
                                    length = int(isLength)
                                    body = msg.fp.read()
                                    num = len(body)
                                    if length != num:
                                        # NOTE(review): same decompressobj
                                        # clobbering as above.
                                        c = next(m)
                                        d, e = c
                                        if IP in d:
                                            b = b + d[56:]
                                    else:
                                        data = d.decompress(body)
                                        header = str(msg)
                                        open(out, 'ab').write(status_line)
                                        open(out, 'ab').write(header)
                                        open(out, 'ab').write('\n')
                                        open(out, 'ab').write(data)
                                else:
                                    # Plain response: sanitize and dump.
                                    length = 1
                                    num = 1
                                    body = msg.fp.read()
                                    body = re.sub(
                                        '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]', '.', body)
                                    header = str(msg)
                                    open(out, 'ab').write(status_line)
                                    open(out, 'ab').write(header)
                                    open(out, 'ab').write('\n')
                                    open(out, 'ab').write(body)
                        else:
                            # Not an HTTP response: sanitize and dump.
                            b = re.sub(
                                '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]', '.', b)
                            open(out, 'ab').write(b)
                    except:
                        # Best-effort recovery: note the failure and keep
                        # the sanitized raw flow text.
                        open(out, 'ab').write('DECOMPRESSION ERROR')
                        open(out, 'ab').write('\n\n')
                        b = re.sub(
                            '[^!\"#\$%&\'\(\)\*\+,-\./0-9:;<=>\?@A-Z\[\]\^_`a-z\{\|\}\\\~\t\n\r ]', '.', b)
                        open(out, 'ab').write(b)
def make_headers(fp):
    """Parse headers from *fp*, bridging the py2/py3 httplib API split."""
    if Compatibility.PY2:
        return HTTPMessage(fp)
    return parse_headers(fp)
def test_UpnpDevice():
    """Exercise UpnpDevice: construction, NOTIFY and M-SEARCH responses."""
    xp = mkxp(ns.device)
    udn = 'uuid:00000000-0000-0000-001122334455'
    server_name = 'OS/1.0 UPnP/1.0 pyupnp/1.0'
    dd = resource_filename(upnp.__name__, 'xml/ms.xml')
    # Touch the service descriptions so pkg_resources extracts them too.
    resource_filename(upnp.__name__, 'xml/cds.xml')
    resource_filename(upnp.__name__, 'xml/cms.xml')

    def soap_app(environ, start_response):
        # Dummy WSGI app; only identity is checked below.
        pass

    # __init__
    device = UpnpDevice(udn, dd, None)
    assert udn == device.udn
    assert UpnpDevice.SERVER_NAME == device.server_name
    assert None == device.soap_app
    assert udn == device.dd.findtext(xp('device/UDN'))
    device = UpnpDevice(udn, dd, soap_app, server_name)
    assert udn == device.udn
    assert server_name == device.server_name
    assert soap_app == device.soap_app.app
    assert udn == device.dd.findtext(xp('device/UDN'))

    # make_notify_packets
    host = '127.0.0.1:1900'
    ip = '192.168.0.100'
    port = 19000
    addr = (ip, port)
    dest = '192.168.0.101'
    location = device.make_location(ip, port)
    sa = device.make_notify_packets(host, ip, port, 'ssdp:alive')
    sb = device.make_notify_packets(host, ip, port, 'ssdp:byebye')
    for packets in (sa, sb):
        for packet in packets:
            d = dict(packet)
            assert host == d.get('HOST')
            assert d.get('USN').startswith(udn)
            # LOCATION/SERVER only appear on alive notifications.
            if d.get('NTS') == 'ssdp:alive':
                assert location == d.get('LOCATION')
                assert device.server_name == d.get('SERVER')

    # make_msearch_response
    # ssdp:all
    headers = HTTPMessage(StringIO('ST: ssdp:all'))
    packets = device.make_msearch_response(headers, addr, dest)
    assert 3 + 2 * 0 + len(device.services) == len(packets)
    for packet in packets:
        d = dict(packet)
        assert '' == d.get('EXT')
        assert location == d.get('LOCATION')
        assert device.server_name == d.get('SERVER')
        assert d.get('USN').startswith(device.udn)

    # invalid ST
    headers = HTTPMessage(StringIO('ST: xxxx'))
    packets = device.make_msearch_response(headers, addr, dest)
    assert [] == packets

    # UDN
    headers = HTTPMessage(StringIO('ST: ' + device.udn))
    packets = device.make_msearch_response(headers, addr, dest)
    assert 1 == len(packets)
    d = dict(packets[0])
    assert '' == d.get('EXT')
    assert location == d.get('LOCATION')
    assert device.server_name == d.get('SERVER')
    assert device.udn == d.get('ST') == d.get('USN')

    # serviceType
    for serviceType in device.serviceTypes + ['upnp:rootdevice']:
        headers = HTTPMessage(StringIO('ST: ' + serviceType))
        packets = device.make_msearch_response(headers, addr, dest)
        assert 1 == len(packets)
        d = dict(packets[0])
        assert '' == d.get('EXT')
        assert location == d.get('LOCATION')
        assert device.server_name == d.get('SERVER')
        assert serviceType == d.get('ST')
        assert '%s::%s' % (device.udn, serviceType) == d.get('USN')

    # __call__ -- only runs when the optional test deps are installed.
    try:
        from webtest import TestApp
        from routes.middleware import RoutesMiddleware
        sid = 'urn:upnp-org:serviceId:ConnectionManager'
        base = UpnpBase()
        app = TestApp(RoutesMiddleware(device, base.mapper))
        # DD
        res = app.get('/upnp/' + udn)
        # SCPD
        res = app.get('/upnp/%s/%s' % (udn, sid))
        # SOAP
        #res = app.get('/%s/%s/%s' % (udn, sid, 'soap'))
    except ImportError:
        pass
def __init__(self):
    """Canned 400 Bad Request response with a plain-text content type."""
    self.msg = HTTPMessage(StringIO())
    self.msg['content-type'] = 'text/plain'
    self.reason = 'Bad Request'
    self.status = 400
def make_headers(fp):
    """Parse headers from *fp* using the interpreter-appropriate API."""
    if PY2:
        return HTTPMessage(fp)
    return parse_headers(fp)
def parse_headers(fp):
    """Parse a header block from *fp* into an HTTPMessage.

    seekable=0: the message reads from the stream's current position
    without attempting to seek.
    """
    message = HTTPMessage(fp, 0)
    return message
def __init__(self, method, path, version='HTTP/1.1'):
    """Minimal HTTP request container with an empty header block."""
    self.version = version
    self.path = path
    # Methods are case-insensitive on the wire; normalize to upper case.
    self.method = method.upper()
    self.msg = HTTPMessage(StringIO())
    self.body = ''