def on_body(self):
    """Finish the in-flight HTTP message: capture the per-message state,
    reset the parser for the next request, emit the 'on_body' event, and
    tear the connection down if the peer (or a proxy) asked for close.
    """
    request = self.current_request
    self.current_request = None
    status = self.status
    self.status = None
    headers = self.current_headers or {}
    body = self.current_body
    body.seek(0)
    # Rearm every piece of per-message parser state before dispatching,
    # so event handlers see a clean connection object.
    self.status_line = ""
    self.chunk_header = ""
    self.current_headers = ""
    self.current_chunk = stringio()
    self.current_body = stringio()
    self.waiting_for = "request"
    self.event("on_body", self, request, status, headers, body)
    connection = headers.get("Connection", headers.get("Proxy-Connection"))
    if connection == "close":
        log.debug(
            'got "Connection: %s", "Proxy-Connection: %s", closing',
            headers.get("Connection"),
            headers.get("Proxy-Connection"),
        )
        self.close()
        self.on_close()
def on_body(self):
    """Deliver the completed message body to listeners and rearm the parser.

    After the 'on_body' event fires, a "Connection: close" (or
    "Proxy-Connection: close") header makes us shut the socket down.
    """
    request, self.current_request = self.current_request, None
    status, self.status = self.status, None
    headers = self.current_headers or {}
    body = self.current_body
    body.seek(0)
    self.status_line = ''
    self.chunk_header = ''
    self.current_headers = ''
    self.current_chunk = stringio()
    self.current_body = stringio()
    self.waiting_for = 'request'
    self.event('on_body', self, request, status, headers, body)
    if headers.get('Connection', headers.get('Proxy-Connection')) == 'close':
        log.debug('got "Connection: %s", "Proxy-Connection: %s", closing',
                  headers.get('Connection'), headers.get('Proxy-Connection'))
        self.close()
        self.on_close()
def test_size(self):
    """gen_fragments honours an explicit fragment size."""
    data = '.' * (default_block_size * 2)
    self.assert_equal(['....'], list(gen_fragments(stringio(data), 4)))
    # An odd size: one full fragment plus a two-byte remainder.
    actual = list(gen_fragments(stringio(data), default_block_size + 2))
    assert actual == [default_block_size * '.', '..']
def test_nosize(self):
    """Without an explicit size, fragments come out default_block_size big."""
    block = '.' * default_block_size
    assert [block, block] == list(gen_fragments(stringio(block + block)))
    # A short tail after one full block is yielded on its own.
    assert [block, '...'] == list(gen_fragments(stringio(block + '...')))
def extract_descriptors(args):
    """Preprocess the schema named by ``args.infilename`` (expanding its
    includes) and write the extracted descriptors to ``args.outfilename``.

    Exits the process with status 1 if the output file already exists and
    ``-f/--force`` was not supplied.
    """
    if os.path.exists(args.outfilename) and not args.force:
        sys.stderr.write(
            "\nFile {} exists. Use -f/--force to overwrite.\n\n".format(
                args.outfilename
            )
        )
        sys.exit(1)
    outfile = open(args.outfilename, "w")
    # try/finally guarantees the output handle is closed even when
    # preprocessing, parsing or extraction raises (the original leaked it).
    try:
        schema_file_name = os.path.join(
            os.path.abspath(os.path.curdir), args.infilename
        )
        infile = stringio()
        sys.path.append(args.path)
        # Imported lazily so that args.path (appended just above) is searched.
        process_includes = import_module("process_includes")
        process_includes.process_include_files(
            args.infilename, infile, inpath=schema_file_name
        )
        infile.seek(0)
        doc = etree.parse(infile)
        root = doc.getroot()
        descriptors = {}
        extract(root, descriptors, outfile)
        for descriptor in list(descriptors.values()):
            descriptor.export(outfile)
    finally:
        outfile.close()
def extract_descriptors(args):
    """Preprocess the schema named by ``args.infilename`` (expanding its
    includes) and write the extracted descriptors to ``args.outfilename``.

    Exits the process with status 1 if the output file already exists and
    ``-f/--force`` was not supplied.
    """
    if os.path.exists(args.outfilename) and not args.force:
        sys.stderr.write(
            '\nFile %s exists. Use -f/--force to overwrite.\n\n' % (
                args.outfilename, ))
        sys.exit(1)
    # Python 3 needs an explicit encoding; Python 2 writes raw bytes.
    if sys.version_info.major == 3:
        outfile = open(args.outfilename, 'w', encoding='utf-8')
    else:
        outfile = open(args.outfilename, 'w')
    # try/finally guarantees the output handle is closed even when
    # preprocessing, parsing or extraction raises (the original leaked it).
    try:
        schema_file_name = os.path.join(
            os.path.abspath(os.path.curdir), args.infilename)
        infile = stringio()
        process_includes.process_include_files(
            args.infilename, infile, inpath=schema_file_name)
        infile.seek(0)
        doc = etree.parse(infile)
        root = doc.getroot()
        descriptors = {}
        extract(root, descriptors, outfile)
        for descriptor in list(descriptors.values()):
            descriptor.export(outfile)
    finally:
        outfile.close()
def decode_openpgp_packet(d):
    """Parse one old-format OpenPGP packet from the byte string *d*.

    Returns a ``(packet_type, payload)`` pair.  Only old-format packets
    with a definite length (length types 0, 1 and 2) are supported;
    anything else trips an assertion.
    """
    stream = stringio(d)
    tag = ord(stream.read(1))
    assert (tag >> 7) & 1 == 1   # "always one" marker bit of the tag octet
    assert (tag >> 6) & 1 == 0   # new-format packets are not handled
    lentype = tag & 3
    assert lentype in (0, 1, 2)  # 3 would mean an indeterminate-length packet
    ptype = (tag >> 2) & 15
    if lentype == 0:
        # One-octet length.
        plen = ord(stream.read(1))
    else:
        # Two- or four-octet big-endian length.
        fmt, nbytes = ('>H', 2) if lentype == 1 else ('>I', 4)
        (plen,) = struct.unpack(fmt, stream.read(nbytes))
    payload = stream.read(plen)
    assert len(payload) == plen  # truncated input would shortchange us
    return ptype, payload
def parse_preprocess_xsd(options):
    """Expand the include directives of ``options.infilename`` in memory and
    return the root element of the resulting schema document."""
    schema_path = os.path.join(
        os.path.abspath(os.path.curdir), options.infilename)
    expanded = stringio()
    process_includes = importlib.import_module("process_includes")
    process_includes.process_include_files(
        options.infilename, expanded, inpath=schema_path)
    expanded.seek(0)
    return etree.parse(expanded).getroot()
def process_chunk_data(self):
    """Consume the buffered chunk body of a chunked transfer encoding.

    A zero-length chunk is the terminator: finish the message via
    on_body() and go back to waiting for the next request.  Otherwise
    decode the chunk against its header line and expect another chunk
    header next.  Returns the terminator to read up to next.
    """
    data = self.current_chunk.getvalue()
    self.current_chunk = stringio()
    if not data:
        # Empty chunk == end of the chunked body.
        self.waiting_for = 'request'
        self.on_body()
        return self.CRLF
    header, self.chunk_header = self.chunk_header, ''
    self.waiting_for = 'chunk-header'
    return self.decode_chunk(header, data)
def process_chunk_data(self):
    """Handle one buffered chunk of a chunked transfer body.

    Non-empty chunks are decoded against their saved header line and we go
    back to expecting a chunk header; the empty chunk terminates the body
    and hands control to on_body().  Returns the next terminator.
    """
    chunk = self.current_chunk.getvalue()
    self.current_chunk = stringio()
    if len(chunk) > 0:
        pending_header = self.chunk_header
        self.chunk_header = ""
        self.waiting_for = "chunk-header"
        return self.decode_chunk(pending_header, chunk)
    # The zero-length chunk terminates the body.
    self.waiting_for = "request"
    self.on_body()
    return self.CRLF
def test_writer(self):
    """write_pretty_xml output for a hand-built tree matches the fixture."""
    root = Element('a')
    branch = SubElement(root, 'b')
    branch.append(Comment('a comment'))
    node = SubElement(branch, 'c', d='e')
    SubElement(node, 'f').text = 'g'
    SubElement(node, 'h').text = 'i << >> << &&'
    branch.append(ProcessingInstruction('pdk', 'processing instruction'))
    buffer = stringio()
    write_pretty_xml(ElementTree(root), buffer)
    self.assert_equals_long(expected_xml_output, buffer.getvalue())
def tostring(self):
    """Serialize the relation as a TRP_DELIMITER-separated record.

    The record is: rel_type, frequency, then one field per argument —
    EMPTY_ARGUMENT for a missing argument, otherwise the term and its POS
    joined by TRM_DELIMITER.
    """
    fields = [str(self.rel_type), str(self.frequency)]
    for term_pos in self.arguments:
        if term_pos is None:
            fields.append(self.EMPTY_ARGUMENT)
        else:
            fields.append(term_pos[0] + self.TRM_DELIMITER + term_pos[1])
    # str.join builds the record in one pass instead of repeated
    # StringIO writes.
    return self.TRP_DELIMITER.join(fields)
def __str__(self):
    """Human-readable form: ``<REL arg1 arg2 ... freq>``.

    Each argument renders as ``term-POS`` (POS mapped through
    POS_ID_NAME_MAP) or the literal ``None`` for a missing argument.
    """
    parts = ["<", REL_ID_NAME_MAP[self.rel_type], " "]
    for term_pos in self.arguments:
        if term_pos is None:
            parts.append("None")
        else:
            parts.append("%s-%s" % (term_pos[0], POS_ID_NAME_MAP[term_pos[1]]))
        parts.append(" ")
    parts.append(str(self.frequency))
    parts.append(">")
    # "".join builds the string in one pass instead of repeated
    # StringIO writes.
    return "".join(parts)
def __init__(self, conn=None):
    """Set up the HTTP-connection state machine on top of the async socket.

    Parameters:
        conn: optional existing connection object, passed straight through
            to AsyncSocket.__init__ (None presumably means "not yet
            connected" — confirm against AsyncSocket).
    """
    self._connected = False
    # Initialise both bases explicitly: event dispatch and socket I/O.
    Events.EventMixin.__init__(self)
    AsyncSocket.__init__(self, conn)
    self.buffer = []                # pending data pieces (used by I/O paths not shown here)
    self.status_line = ''           # raw status line accumulated by the parser
    self.status = None              # parsed status, filled in once the line is complete
    self.chunk_header = ''          # current chunked-encoding header line
    self.current_request = None     # request whose response is being parsed
    self.current_body = stringio()  # accumulates the message body (see on_body)
    self.current_chunk = stringio() # accumulates the current chunk (see process_chunk_data)
    self.current_headers = ''       # raw header block until parsed into a dict
    self.body_length = 0            # NOTE(review): presumably the expected Content-Length — confirm
    self.waiting_for = 'request'    # parser state: 'request', 'chunk-header', ...
    self.set_terminator(self.CRLF)  # read line-by-line to start with
    self.ssl = False
    self.ssl_want = None            # NOTE(review): pending SSL handshake state — confirm semantics
    self.lastbuffer = None
    self._sent_data = False
def download(binary, version):
    """Download a HashiCorp release zip for *binary*/*version*, extract its
    single file to ``/tmp/<binary>``, mark it executable, and return the path.

    The platform component of the URL comes from platform.system().
    """
    import io  # local import: BytesIO for the in-memory zip archive

    webfile = requests.get(
        "https://releases.hashicorp.com/{binary}/{version}/"
        "{binary}_{version}_{system}_amd64.zip".format(
            binary=binary, version=version, system=platform.system().lower()
        )
    )
    # BUG FIX: response.content is bytes; ZipFile needs a binary buffer,
    # not a text-mode StringIO.
    archive = zipfile.ZipFile(io.BytesIO(webfile.content))
    content = archive.open(archive.filelist[0]).read()
    filepath = "/tmp/" + binary
    # The context manager guarantees the handle is closed before chmod.
    with open(filepath, "wb") as localfile:
        localfile.write(content)
    os.chmod(filepath, 0o755)
    return filepath
def __init__(self, conn=None):
    """Initialise connection state over the EventMixin and AsyncSocket bases.

    Parameters:
        conn: optional pre-existing connection handed to
            AsyncSocket.__init__; defaults to None.
    """
    self._connected = False
    # Both base classes are initialised explicitly rather than via super().
    Events.EventMixin.__init__(self)
    AsyncSocket.__init__(self, conn)
    self.buffer = []                 # pending data pieces for I/O paths not visible here
    self.status_line = ""            # raw status line being accumulated
    self.status = None               # parsed status once the line is complete
    self.chunk_header = ""           # header line of the chunk currently being read
    self.current_request = None      # request whose response is in flight
    self.current_body = stringio()   # buffers the response body
    self.current_chunk = stringio()  # buffers the chunk currently being read
    self.current_headers = ""        # raw headers until parsed
    self.body_length = 0             # NOTE(review): presumably expected body size — confirm
    self.waiting_for = "request"     # parser state-machine position
    self.set_terminator(self.CRLF)   # start in line-oriented reading mode
    self.ssl = False
    self.ssl_want = None             # NOTE(review): pending TLS handshake state — confirm
    self.lastbuffer = None
    self._sent_data = False
def _format_headers(self, req): buf = stringio() write = buf.write header_dicts = [req.headers, req.unredirected_hdrs] for d in header_dicts: for key, value in d.items(): write(key) write(": ") write(value) write("\r\n") write("\r\n") return buf.getvalue()
def _format_headers(self, req): buf = stringio() write = buf.write header_dicts = [req.headers, req.unredirected_hdrs] for d in header_dicts: for key, value in d.items(): write(key) write(': ') write(value) write('\r\n') write('\r\n') return buf.getvalue()
def test_split(self):
    """split_pipe groups pipe-separated values under their key."""
    handle = stringio(
        'a|b\n'
        'a|c\n'
        'b|b\n'
        'b|c\n'
        'b|d\n'
        'c|zaa\n'
        'a|z\n'
        'local:/z|asdfjkl;\n'
        'local:/z|qwerty\n'
    )
    expected = {
        'a': ['b', 'c', 'z'],
        'b': ['b', 'c', 'd'],
        'c': ['zaa'],
        'local:/z': ['asdfjkl;', 'qwerty'],
    }
    self.assert_equal(expected, split_pipe(handle))
def test_xml_read_then_write(self):
    """Parsing the fixture and pretty-printing it again is lossless."""
    output = stringio()
    write_pretty_xml(parse_xml(stringio(expected_xml_output)), output)
    self.assert_equals_long(expected_xml_output, output.getvalue())
def test_pickle(self, rossi):
    """Fitted survival models must survive pickling."""
    from pickle import dump
    for fitter_cls in (CoxPHFitter, AalenAdditiveFitter):
        fitted = fitter_cls().fit(rossi, 'week', 'arrest')
        dump(fitted, stringio())
def test_zero_length(self):
    """Adjacent terminators yield an empty field in between."""
    handle = stringio('abc\ndef\0\0fgh\0ijk')
    self.assert_equal(['abc\ndef', '', 'fgh', 'ijk'],
                      list(NullTerminated(handle)))
def test_over_block_size(self):
    """Fields spanning a block boundary are reassembled (block size 5)."""
    self.assert_equal(
        ['abc\ndef', 'fgh', 'ijk'],
        list(NullTerminated(stringio('abc\ndef\0fgh\0ijk'), 5)))
def test_null_terminated_missing_last(self):
    """A trailing field with no final terminator is still yielded."""
    self.assert_equal(
        ['abc\ndef', 'fgh', 'ijk'],
        list(NullTerminated(stringio('abc\ndef\0fgh\0ijk'))))
def test_pickle(self, rossi, regression_models):
    """Every regression-model fixture pickles cleanly after fitting."""
    from pickle import dump
    for model in regression_models:
        dump(model.fit(rossi, 'week', 'arrest'), stringio())
def test_terminator_at_block_end(self):
    """A terminator landing exactly on a block boundary (block size 8)."""
    self.assert_equal(
        ['abc\ndef', 'fgh', 'ijk'],
        list(NullTerminated(stringio('abc\ndef\0fgh\0ijk'), 8)))