def test_remains_file_pointer_in_function(self):
    """Verify @remain_file_pointer restores a stream's position after the
    decorated function returns, even though the function seeks elsewhere."""
    dummy_file = StringIO('abcdefg')

    @remain_file_pointer
    def seek_into_file(file_obj):
        # Deliberately move the pointer; the decorator should undo this.
        file_obj.seek(1, 0)

    dummy_file.seek(2, 0)
    self.assertEqual(2, dummy_file.tell())
    seek_into_file(dummy_file)
    # Pointer must be back at 2, not at 1 where seek_into_file left it.
    self.assertEqual(2, dummy_file.tell())
def test_put_should_accept_file_like_objects(self):
    """ put()'s local_path arg should take file-like objects too """
    local = self.path('whatever')
    fake_file = StringIO()
    fake_file.write("testing file-like objects in put()")
    # Remember the position so we can check put() does not move it.
    pointer = fake_file.tell()
    target = '/new_file.txt'
    with hide('everything'):
        put(fake_file, target)
        get(target, local)
    # Remote file contents must match what was written to the buffer.
    eq_contents(local, fake_file.getvalue())
    # Sanity test of file pointer
    eq_(pointer, fake_file.tell())
def read(self, length=-1):
    """Blocking read that drains queued StringIO buffers under self.lock.

    length == -1 means read everything until EOF; otherwise read at most
    `length` characters across however many queued buffers it takes.
    """
    read_buf = StringIO()
    remaining = length
    while True:
        self.lock.acquire()
        try:
            # Read will block forever until we close the file:
            if self.eof and len(self.buffers) == 0:
                break
            elif len(self.buffers) == 0:
                # No data yet and not at EOF: spin until a writer appends.
                # NOTE(review): this busy-waits, repeatedly taking the lock;
                # a condition variable would avoid burning CPU — confirm
                # whether that was a deliberate trade-off.
                continue
            buffer = self.buffers[0]
            # Resume from where the previous read left off in this buffer.
            buffer.seek(self.read_pos)
            read_buf.write(buffer.read(remaining))
            self.read_pos = buffer.tell()
            if length == -1:
                # we did not limit the read, we exhausted the buffer, so delete it.
                # keep reading from remaining buffers.
                del self.buffers[0]
                self.read_pos = 0
            else:
                #we limited the read so either we exhausted the buffer or not:
                remaining = length - read_buf.tell()
                if remaining > 0:
                    # exhausted, remove buffer, read more.
                    # keep reading from remaining buffers.
                    del self.buffers[0]
                    self.read_pos = 0
                else:
                    break
        finally:
            self.lock.release()
    return read_buf.getvalue()
class Node:
    """Streams a JSON-ish object to stdout: tags, values, timestamps and
    buffered multi-line text. Output is emitted via print(), not returned."""

    def __init__(self):
        # _has_value: whether at least one tag has been emitted (controls
        # whether the next separator is '{' or ',').
        self._has_value = False
        # Accumulates multi-line text until it is flushed as one JSON string.
        self._text = StringIO()

    def _separator(self):
        # First tag opens the object; subsequent tags are comma-separated.
        return ',' if self._has_value else '{'

    def add_tag(self, tag):
        print('{}"{}":'.format(self._separator(), tag))
        if not self._has_value:
            self._has_value = True

    def add_value(self, value):
        print(json.dumps(value))

    def append_multiline_text(self, text):
        self._text.write(text)

    def add_timestamp(self, iso):
        # Parse the naive ISO timestamp, then shift by the local UTC offset.
        # NOTE(review): assumes the 'Z' input is wall-clock local time —
        # confirm against the producer of these timestamps.
        local = datetime.strptime(iso, '%Y-%m-%dT%H:%M:%SZ').timestamp()
        utc = local - time.timezone
        print(int(utc))

    # Adds previously appended text
    def _finish_multiline_text(self):
        # Newlines are flattened to spaces so the text is a single JSON string.
        print(json.dumps(self._text.getvalue().replace('\n', ' ')))
        self._text.seek(0)

    def close(self):
        # Either flush pending multi-line text, or close the JSON object.
        self._finish_multiline_text() if self._text.tell() > 0 else print('}')
def response(self, pdu):
    """Accumulate incoming PDU data and unpickle as many complete messages
    as the buffer currently holds, forwarding each upstream."""
    if _debug: PickleActorMixIn._debug("response %r", pdu)

    # add the data to our buffer
    self.pickleBuffer += pdu.pduData

    # build a file-like object around the buffer
    # NOTE(review): `.len` is an attribute of Python 2's cStringIO objects;
    # io.StringIO has no such attribute — confirm this module targets Py2.
    strm = StringIO(self.pickleBuffer)

    pos = 0
    while (pos < strm.len):
        try:
            # try to load something
            msg = pickle.load(strm)
        except:
            # Incomplete pickle at the tail of the buffer: stop and wait
            # for more data to arrive.
            break

        # got a message
        rpdu = PDU(msg)
        rpdu.update(pdu)

        super(PickleActorMixIn, self).response(rpdu)

        # see where we are
        pos = strm.tell()

    # save anything left over, if there is any
    if (pos < strm.len):
        self.pickleBuffer = self.pickleBuffer[pos:]
    else:
        self.pickleBuffer = ''
class FifoBuffer(io.TextIOBase):
    """Minimal FIFO text buffer: write() appends, read() drains everything
    written since the last read, peek() returns pending data non-destructively.
    self.len tracks the number of unread characters; the underlying StringIO's
    position doubles as the write cursor."""

    def __init__(self):
        self.buf = StringIO()
        self.len = 0

    def read(self):
        """Reads data from buffer"""
        self.buf.seek(0)
        l = self.len
        self.len=0
        res = self.buf.read(l)
        # Rewind so the next write() overwrites the consumed data.
        self.buf.seek(0)
        return res

    def write(self, arg):
        # StringIO.write returns the character count written.
        self.len = self.len + self.buf.write(arg)

    def peek(self):
        # Current position == end of pending data; restore it afterwards.
        x = self.buf.tell()
        self.buf.seek(0)
        res = self.buf.read(x)
        self.buf.seek(x)
        return res
def to_python(self, value):
    """Normalize `value` to a file or a URL according to self.to.

    - self.to is None: return the value unchanged.
    - self.to == 'file': a URL value is fetched and wrapped in an
      InMemoryUploadedFile.
    - self.to == 'url': an UploadedFile value is saved to default storage
      and its URL is returned.

    Raises:
        ValidationError(self.url_fetch_error) when the URL cannot be fetched
        or returns a non-success status.
    """
    value = super(FileOrURLField, self).to_python(value)
    if self.to is None:
        return value
    elif self.to == 'file' and not isinstance(value, UploadedFile):
        try:
            resp = requests.get(value)
        except requests.RequestException:
            # Covers connection errors, timeouts, and malformed URLs.
            raise ValidationError(self.url_fetch_error)
        if not (200 <= resp.status_code < 400):
            raise ValidationError(self.url_fetch_error)
        io = StringIO(unicode(resp.content))
        # BUGFIX: the original did io.seek(0) then io.seek(os.SEEK_END),
        # which positioned the stream at absolute offset 2 (the value of
        # SEEK_END), so `size` was always 2. Seek relative to the end to
        # measure the actual payload length.
        io.seek(0, os.SEEK_END)
        size = io.tell()
        io.seek(0)
        return InMemoryUploadedFile(
            io, None, posixpath.basename(value),
            resp.headers['content-type'], size, None)
    elif self.to == 'url' and isinstance(value, UploadedFile):
        path = default_storage.save(
            posixpath.join(self.upload_to, value.name),
            ContentFile(value.read()))
        if self.no_aws_qs:
            default_storage.querystring_auth = False
        return default_storage.url(path)
    return value
def do_POST(self):
    """Serve a POST request."""
    # deal_post_data() performs the actual upload; r is a success flag and
    # info a human-readable message.
    r, info = self.deal_post_data()
    print(r, info, "by: ", self.client_address)
    # Build the HTML result page in memory so Content-Length can be set.
    f = StringIO()
    f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    f.write("<html>\n<title>Upload Result Page</title>\n")
    f.write("<body>\n<h2>Upload Result Page</h2>\n")
    f.write("<hr>\n")
    if r:
        f.write("<strong>Success:</strong>")
    else:
        f.write("<strong>Failed:</strong>")
    f.write(info)
    f.write("<br><a href=\"%s\">back</a>" % self.headers['referer'])
    f.write("<hr><small>Powered By: bones7456, check new version at ")
    f.write("<a href=\"http://li2z.cn/?s=SimpleHTTPServerWithUpload\">")
    f.write("here</a>.</small></body>\n</html>\n")
    # Position after the writes equals the body length.
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", str(length))
    self.end_headers()
    if f:
        self.copyfile(f, self.wfile)
        f.close()
def readPDAEPartial(data, refinements_read, num_refinements):
    """Parse as many complete PDAE refinements out of `data` as possible.

    Args:
        data: buffered PDAE text (may end mid-refinement).
        refinements_read: number of refinements already consumed upstream.
        num_refinements: total expected refinements, or None if the header
            has not been read yet.

    Returns:
        (refinements_read, num_refinements, pm_refinements, data_left) where
        data_left is the unconsumed tail of `data` to be re-fed later.
    """
    fakebuf = StringIO(data)
    lines_left = data.count('\n')
    if num_refinements is None:
        # Header not read yet: consume its two lines to learn the count.
        pdae_line, num_refinements = readPDAEHeader(fakebuf)
        lines_left -= 2
    pm_refinements = []
    # BUGFIX: num_operations must exist even if the loop body never runs,
    # otherwise the `is not None` check below raises NameError.
    num_operations = None
    while lines_left > 1 and refinements_read < num_refinements:
        num_operations = readPDAEnumops(fakebuf)
        lines_left -= 1
        if lines_left < num_operations:
            # Not enough buffered lines for this refinement; its count line
            # is re-emitted into data_left below so parsing can resume.
            break
        refinement_ops = readPDAErefinement(fakebuf, num_operations)
        lines_left -= len(refinement_ops)
        refinements_read += 1
        pm_refinements.append(refinement_ops)
        num_operations = None
    data_left = data[fakebuf.tell():]
    if num_operations is not None:
        # Push the already-consumed operation-count line back on the front.
        data_left = "%d\n" % num_operations + data_left
    return (refinements_read, num_refinements, pm_refinements, data_left)
def _add_to_tar(tar, fname, content):
    """Add `content` to the open tarfile `tar` under the name `fname`."""
    # NOTE(review): `unicode` is Python 2 only; under Python 3 this raises
    # NameError — confirm which interpreter this module targets.
    value = StringIO(unicode(content))
    info = tarfile.TarInfo(name=fname)
    # TarInfo.size must be set before addfile(), so measure the payload by
    # seeking to the end, then rewind for reading.
    value.seek(0, os.SEEK_END)
    info.size = value.tell()
    value.seek(0, os.SEEK_SET)
    tar.addfile(tarinfo=info, fileobj=value)
def run_test(conf_url=querymod.configuration.get('url', None)):
    """ Run the unit tests and make sure the server is online."""
    # NOTE: the default is evaluated once at import time from the config.
    if conf_url is None:
        raise ValueError("Please create a local api_config.json file in the "
                         "root directory of the repository with 'url' defined "
                         "with the root URL of the server. (No /rest or "
                         "/jsonrpc should be present.) Or provide URL on "
                         "command line.")
    # Tell the test framework where to query
    global url
    url = conf_url
    # Capture the test runner's report in memory instead of stderr.
    results = StringIO()
    # Run the test
    demo_test = unittest.TestLoader().loadTestsFromTestCase(TestAPI)
    unittest.TextTestRunner(stream=results).run(demo_test)
    # See if the end of the results says it passed
    # (a passing run ends with the literal "OK\n").
    results.seek(results.tell() - 3)
    if results.read() == "OK\n":
        sys.exit(0)
    else:
        results.seek(0)
        print(results.read())
        sys.exit(1)
def do_POST(self):
    """Serve a POST request."""
    # deal_post_data() performs the upload; r is a success flag, info a
    # human-readable message.
    r, info = self.deal_post_data()
    print(r, info, "by: ", self.client_address)
    # Build the result page in memory so Content-Length can be computed.
    f = StringIO()
    f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    f.write("<html>\n<title>Upload Result Page</title>\n")
    f.write("<body>\n<h2>Upload Result Page</h2>\n")
    f.write("<hr>\n")
    if r:
        f.write("<strong>Success:</strong>")
    else:
        f.write("<strong>Failed:</strong>")
    f.write(info)
    f.write("<br><a href=\"%s\">back</a>" % self.headers['referer'])
    f.write("<hr><small>Powered By: bones7456, Ported By:0312birdzhang ")
    f.write("here</a>.</small></body>\n</html>\n")
    # Position after the writes equals the body length.
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", str(length))
    self.end_headers()
    if f:
        # Encode each line to bytes for the socket (StringIO holds text).
        for i in f.readlines():
            #self.copyfile(f, self.wfile)
            self.wfile.write(i.encode("utf-8"))
        f.close()
def trystringio(n):
    """Run one of four small StringIO demos, selected by `n`."""
    if n == 1:
        # Write, then dump the whole buffer at once.
        out = StringIO()
        out.write('Hello world!')
        contents = out.getvalue()
        print(contents)
    if n == 2:
        # Read a single line from a pre-filled buffer.
        src = StringIO('Hello!\nHi!\nGoodbye!')
        first = src.readline()
        print(first)
    if n == 3:
        # Iterate a pre-filled buffer line by line.
        src = StringIO('Hello!\nHi!\nGoodbye!')
        for row in src:
            print(row, end='')
    if n == 4:
        # Write, report the position, rewind, then iterate the lines.
        out = StringIO()
        out.write('Hello world!\nHi\nGoodbye!')
        print('Pos: ', out.tell())
        out.seek(0)
        for row in out:
            print(row, end='')
    return
def test_get_first_last_lines():
    """get_first_last_lines() must return the first and last lines and leave
    the handle's position untouched."""
    # Using bytes instead of strings since BytesIO I'm using to mock filehandle operations requires this.
    first_line = 'first line\n'
    middle_line = 'middle line\n'
    last_line = 'last line\n'
    data = first_line + middle_line + last_line
    f = StringIO()
    f.write(data)
    # Record the position (end of buffer) before calling the helper.
    orig_pos = f.tell()
    ft = FileTools()
    first, last = ft.get_first_last_lines(f)
    assert orig_pos == f.tell(
    )  # Assert file_handle position returned to where it was originally
    assert first_line == first
    assert last_line == last
def _get_source_string_and_consume_token_characters(source: ParseSource,
                                                    source_io: io.StringIO) -> str:
    """Return the source text of the token just written to `source_io` and
    advance `source` past it."""
    # The stream position presumably equals the number of source characters
    # the tokenizer consumed — TODO confirm against the caller.
    num_chars_consumed = source_io.tell()
    # A trailing whitespace character is a delimiter, not part of the token,
    # so leave it un-consumed.
    if source.remaining_part_of_current_line[num_chars_consumed - 1].isspace():
        num_chars_consumed -= 1
    ret_val = source.remaining_source[:num_chars_consumed]
    source.consume_part_of_current_line(num_chars_consumed)
    return ret_val
def parse_text(self, message_text, body_parsing=True):
    """Parse a message held in a string; optionally skip body parsing."""
    self.body_parsing = body_parsing
    stream = StringIO(message_text)
    self.__parse(stream)
    self.build_message()
    # Stream position after parsing == number of characters consumed.
    self.original_length = stream.tell()
    if self.keep_original_data:
        # Retain only the consumed prefix of the raw input.
        self.original_data = message_text[:self.original_length]
def printHeader(self):
    """Emit the table title and the centered column names, underlined with
    dashes, to self.printer."""
    out = StringIO()
    out.write(self.title)
    out.write(':\n')
    # Remember where the header row begins so the underline can be sized.
    header_start = out.tell()
    for idx, name in enumerate(self.columnsNames):
        out.write(name.center(self.columnsWidths[idx]))
        out.write(self.columnsSeparator)
    out.write('\n')
    # The underline spans the header row minus its trailing newline.
    out.write((out.tell() - header_start - 1) * '-')
    print(out.getvalue(), file=self.printer)
    out.close()
def download_with_progress_bar(self, data_url, return_buffer=False):
    """Download a file, showing progress.

    Parameters
    ----------
    data_url : string
        web address.
    return_buffer : boolean (optional)
        if true, return a StringIO buffer rather than a string.

    Returns
    -------
    str
        Content of the file.
    """
    num_units = 40
    fhandle = urlopen(data_url, timeout=6)
    # Header access differs between Python 2 and 3 urllib responses.
    if sys.version_info[0] >= 3:
        total_size = int(fhandle.getheader('Content-Length').strip())
    else:
        total_size = int(fhandle.headers.getheader('Content-Length').strip())
    chunk_size = total_size // num_units
    print("Downloading %s" % data_url)
    nchunks = 0
    buf = StringIO()
    total_size_str = self.bytes_to_string(total_size)
    # total_size_str=total_size_str.decode('utf-8')
    while True:
        try:
            next_chunk = fhandle.read(chunk_size)
            nchunks += 1
        except timeout:
            print('request timeout for %s' % data_url)
            next_chunk = None
        if next_chunk:
            buf.write(next_chunk.decode('utf-8'))
            # buf.tell() == bytes downloaded so far; drives the progress bar.
            s = ('[' + nchunks * '=' + (num_units - 1 - nchunks) * ' ' +
                 '] %s / %s \r' % (self.bytes_to_string(buf.tell()),
                                   total_size_str))
        else:
            sys.stdout.write('\n')
            break
        sys.stdout.write(s)
        sys.stdout.flush()
    #buf.reset()
    buf.seek(0)
    if return_buffer:
        return buf
    else:
        return buf.getvalue()
def upload(self, fileobj=None, filename=None, description='', ignore=False,
           file_size=None, url=None, session_key=None):
    """Upload a file to the wiki, retrying on server lag and HTTP errors.

    Falls back to the legacy upload path for MediaWiki < 1.16.
    """
    if self.version[:2] < (1, 16):
        return compatibility.old_upload(self, fileobj=fileobj,
                                        filename=filename,
                                        description=description,
                                        ignore=ignore,
                                        file_size=file_size)

    image = self.Images[filename]
    if not image.can('upload'):
        raise errors.InsufficientPermission(filename)

    # Assemble the API parameters for action=upload.
    predata = {}
    predata['comment'] = description
    if ignore:
        predata['ignorewarnings'] = 'true'
    predata['token'] = image.get_token('edit')
    predata['action'] = 'upload'
    predata['format'] = 'json'
    predata['filename'] = filename
    if url:
        predata['url'] = url
    if session_key:
        predata['session_key'] = session_key

    if fileobj is None:
        postdata = self._query_string(predata)
    else:
        # Accept raw string content by wrapping it in a file-like object.
        if type(fileobj) is str:
            file_size = len(fileobj)
            fileobj = StringIO(fileobj)
        if file_size is None:
            # Measure the stream by seeking to its end, then rewind.
            fileobj.seek(0, 2)
            file_size = fileobj.tell()
            fileobj.seek(0, 0)
        postdata = upload.UploadFile('file', filename, file_size, fileobj,
                                     predata)

    wait_token = self.wait_token()
    while True:
        try:
            data = self.raw_call('api', postdata).read()
            if pythonver >= 3:
                info = json.loads(data.decode('utf-8'))
            else:
                info = json.loads(data)
            if not info:
                info = {}
            if self.handle_api_result(info, kwargs=predata):
                return info.get('upload', {})
        except errors.HTTPStatusError as exc:
            e = exc.args if pythonver >= 3 else exc
            if e[0] == 503 and e[1].getheader('X-Database-Lag'):
                # Server is lagging; honor its Retry-After hint.
                self.wait(wait_token, int(e[1].getheader('Retry-After')))
            elif e[0] < 500 or e[0] > 599:
                raise
            else:
                self.wait(wait_token)
        except errors.HTTPError:
            self.wait(wait_token)
        # Rewind the payload before retrying the upload.
        fileobj.seek(0, 0)
def caculate_hotwords(buffer: io.StringIO):
    """Scan `buffer` for non-ASCII characters, collect the text following
    each occurrence, then search, rank and print the hot words found."""
    # char_map: char -> [occurrence_count, [following-text snippets]]
    char_map = {}
    while True:
        char = buffer.read(1)  # type:str
        if not char:
            break
        # Skip ASCII and explicitly excluded punctuation.
        if ord(char) <= 0x7F or char in exclude_signs:
            continue
        if char not in char_map:
            char_map[char] = [0, []]
        # Peek ahead HOTWORD_SEARCH_DEPTH chars without moving the cursor.
        position = buffer.tell()
        scope = buffer.read(HOTWORD_SEARCH_DEPTH)
        buffer.seek(position)
        item = char_map[char]
        item[1].append(scope)
        item[0] += 1
    strip_char_map(char_map)
    debug.log('char')
    # Keep only characters seen more than twice.
    char_list = list(char_map.keys())
    temp_list = []
    for char in char_list:
        if char_map[char][0] > 2:
            temp_list.append(char)
    char_list = temp_list

    def char_rank_sort(a, b):
        # Most frequent characters first.
        return -1 if char_map[a][0] > char_map[b][0] else 1

    char_list.sort(key=cmp_to_key(char_rank_sort))
    debug.log('char-sort')
    # strip_map: candidate word -> number of snippets it was found in.
    strip_map = {}
    for char in char_list:
        item = char_map[char]
        data_list = item[1]  # type: list[str]
        result = iterate_search(data_list, char)
        for word in result:
            if word not in strip_map:
                strip_map[word] = 0
            strip_map[word] += 1
    word_list = []
    debug.log('search-hotword')
    # Drop words that appeared only once.
    for word in strip_map.keys():
        num = strip_map[word]
        if num == 1:
            continue
        word_list.append((word, num))
    debug.log('strip-none')
    word_list = strip_redundants(data_list=word_list)
    debug.log('strip-redundants')

    def hotword_rank_sort(a: Tuple[str, int], b: Tuple[str, int]):
        # Order by count, then word length, then lexicographically (ascending,
        # so the hottest words end up at the tail of the list).
        if a[1] != b[1]:
            return 1 if a[1] > b[1] else -1
        if len(a[0]) != len(b[0]):
            return 1 if len(a[0]) > len(b[0]) else -1
        return 1 if a[0] > b[0] else -1

    word_list.sort(key=cmp_to_key(hotword_rank_sort))
    debug.log('sort')
    # Print only the top MAX_RESULT_NUM entries (the list tail).
    length = len(word_list)
    output_limit = MAX_RESULT_NUM if MAX_RESULT_NUM > 0 else length
    offset = length - output_limit
    for n in range(offset, length):
        word, num = word_list[n]
        print(word, num)
        if n + 1 - offset >= output_limit:
            break
def download_with_progress_bar(self, data_url, return_buffer=False):
    """Download a file, showing progress.

    Parameters
    ----------
    data_url : string
        web address.
    return_buffer : boolean (optional)
        if true, return a StringIO buffer rather than a string.

    Returns
    -------
    str
        Content of the file.
    """
    num_units = 40
    fhandle = urlopen(data_url, timeout=6)
    # Header access differs between Python 2 and 3 urllib responses.
    if sys.version_info[0] >= 3:
        total_size = int(fhandle.getheader('Content-Length').strip())
    else:
        total_size = int(
            fhandle.headers.getheader('Content-Length').strip())
    chunk_size = total_size // num_units
    print("Downloading %s" % data_url)
    nchunks = 0
    buf = StringIO()
    total_size_str = self.bytes_to_string(total_size)
    # total_size_str=total_size_str.decode('utf-8')
    while True:
        try:
            next_chunk = fhandle.read(chunk_size)
            nchunks += 1
        except timeout:
            print('request timeout for %s' % data_url)
            next_chunk = None
        if next_chunk:
            buf.write(next_chunk.decode('utf-8'))
            # buf.tell() == characters downloaded so far; drives the bar.
            s = ('[' + nchunks * '=' + (num_units - 1 - nchunks) * ' ' +
                 '] %s / %s \r' % (self.bytes_to_string(buf.tell()),
                                   total_size_str))
        else:
            sys.stdout.write('\n')
            break
        sys.stdout.write(s)
        sys.stdout.flush()
    #buf.reset()
    buf.seek(0)
    if return_buffer:
        return buf
    else:
        return buf.getvalue()
def test_get_current_line():
    """_get_current_line() must return the full line containing the current
    position and leave the handle at that line's start."""
    ft = FileTools()
    text = '\ntest1\ntest2\n'
    log.debug(f"\ntext: {repr(text)}")
    handle = StringIO(text)
    handle.seek(0, os.SEEK_SET)
    line = ft._get_current_line(handle)
    assert line == '\n'
    assert handle.tell() == 0
    # Get 'test1\n' by starting at every single character
    # Each iteration should return 'test1\n'
    for i in range(1, 7):
        handle.seek(i)
        line = ft._get_current_line(handle)
        assert line == 'test1\n'
        assert handle.tell() == 1
    # Test readline then previous line to make sure they return the same line
    handle.seek(1)
    line = handle.readline()
    assert line == 'test1\n'
    line = ft._get_previous_line(handle)
    assert line == 'test1\n'
    assert handle.tell() == 1
    # Get 'test2\n' by starting at every single character
    for i in range(7, 12):
        handle.seek(i)
        line = ft._get_current_line(handle)
        assert line == 'test2\n'
        assert handle.tell() == 7
    # Test without newline at the beginning
    text = 'test1\ntest2\n'
    log.debug(f"\ntext: {repr(text)}")
    handle = StringIO(text)
    for i in range(0, 6):
        # Get 'test1\n' by starting at every single character
        handle.seek(i)
        line = ft._get_current_line(handle)
        assert line == 'test1\n'
        assert handle.tell() == 0
def json_to_table(auth, project_id, dataset_id, table_id, json_data,
                  schema=None, disposition='WRITE_TRUNCATE', wait=True):
    """Stream JSON records into a BigQuery table in buffered chunks.

    Records are serialized as newline-delimited JSON and flushed to
    io_to_table() whenever the in-memory buffer exceeds BIGQUERY_BUFFERSIZE;
    after the first flush the disposition switches to WRITE_APPEND so later
    chunks extend the table.
    """
    if project.verbose:
        print('BIGQUERY JSON TO TABLE: ', project_id, dataset_id, table_id)

    buffer_data = StringIO()
    has_rows = False

    for is_last, record in flag_last(json_data):

        # check if json is already string encoded, and write to buffer
        buffer_data.write(
            record if isinstance(record, str) else json.dumps(record))

        # write the buffer in chunks
        if is_last or buffer_data.tell() + 1 > BIGQUERY_BUFFERSIZE:
            if project.verbose:
                print('BigQuery Buffer Size', buffer_data.tell())
            buffer_data.seek(0)  # reset for read
            io_to_table(auth, project_id, dataset_id, table_id, buffer_data,
                        'NEWLINE_DELIMITED_JSON', schema, 0, disposition)

            # reset buffer for next loop, be sure to do an append to the table
            buffer_data.seek(0)  # reset for write
            buffer_data.truncate()  # reset for write ( yes its needed for EOF marker )
            disposition = 'WRITE_APPEND'  # append all remaining records
            has_rows = True

        # if not end append newline, for newline delimited json
        else:
            buffer_data.write('\n')

    # if no rows, clear table to simulate empty write
    if not has_rows:
        if project.verbose:
            print('BigQuery Zero Rows')
        # BUGFIX: the original passed an undefined name `skip_rows` here,
        # raising NameError on the empty-input path; the in-loop call uses 0.
        return io_to_table(auth, project_id, dataset_id, table_id,
                           buffer_data, 'NEWLINE_DELIMITED_JSON', schema, 0,
                           disposition, wait)
def test_writeResults(self):
    """
    L{DistTrialRunner.writeResults} writes to the stream specified in
    the init.
    """
    stringIO = StringIO()
    result = DistReporter(Reporter(stringIO))
    self.runner.writeResults(result)
    # A non-zero position proves something was written to the stream.
    self.assertTrue(stringIO.tell() > 0)
class StdErrWrapper(object):
    """
    Fake file-like stream object that redirects stderr to a logger instance.
    """

    def __init__(self, logger, log_level=logging.ERROR):
        self.__logger = logger
        self.__log_level = log_level
        # Text is accumulated here until flush() forwards it to the logger.
        self.__buffer = StringIO()
        # write() is chosen per interpreter: Python 3 receives str directly,
        # Python 2 receives bytes and flushes automatically on newline.
        if sys.version_info[0] >= 3:
            def __write(_buffer):
                """
                Write the given buffer to the temporary buffer.
                """
                self.__buffer.write(_buffer)
        else:
            def __write(_buffer):
                """
                Write the given buffer to log.
                """
                _buffer = _buffer.decode('UTF-8')
                self.__buffer.write(_buffer)
                if _buffer == '\n':
                    self.flush()
        self.write = __write

    def update_log_level(self, log_level=logging.ERROR):
        """
        Update the logging level of this stream.
        """
        self.__log_level = log_level

    @staticmethod
    def __filter_record(record):
        # Keep only the last line's message tail (after the first ': '),
        # then prepend it to the full original message.
        msg = record.msg.strip()
        msg = msg.splitlines()[-1]
        msg = msg.split(': ')[1:]
        record.msg = ''.join(msg) + '\n' + record.msg
        return record

    def flush(self):
        """
        Flush the buffer, if applicable.
        """
        # tell() > 0 means something has been buffered since the last flush.
        if self.__buffer.tell() > 0:
            # Write the buffer to log
            # noinspection PyProtectedMember
            self.__logger._log(level=self.__log_level,
                               msg=self.__buffer.getvalue().strip(),
                               record_filter=StdErrWrapper.__filter_record)
            # Remove the old buffer
            self.__buffer.truncate(0)
            self.__buffer.seek(0)
def test_w_multiple_files(self):
    """The function under test must pick the matching line across several
    open files, consuming it and rewinding the others."""
    from io import StringIO
    from ..._compat import TEXT
    VALID1 = TEXT('CODE1 PID1 ID1 234.56 DESC1\n'
                  'CODE3 PID3 ID3 345.67 DESC3\n'
                  )
    buf1 = StringIO(VALID1)
    VALID2 = TEXT('CODE2 PID2 ID2 123.45 DESC2')
    buf2 = StringIO(VALID2)
    code, pid, id_, fromepoch, desc = self._callFUT([buf1, buf2])
    self.assertEqual(code, TEXT('CODE2'))
    self.assertEqual(pid, TEXT('PID2'))
    self.assertEqual(id_, TEXT('ID2'))
    self.assertEqual(fromepoch, 123.45)
    self.assertEqual(desc, TEXT('DESC2'))
    # selected line is consumed, others are pushed back
    self.assertEqual(buf1.tell(), 0)
    self.assertEqual(buf2.tell(), len(VALID2))
def outline_formatter(outline):
    """Convert a '*' / '.' prefixed outline into numbered and bulleted text.

    '*' lines become hierarchical sequence numbers (1, 1.1, ...), '.' lines
    become +/- bullets depending on whether the next bullet line is deeper,
    and other lines inherit the previous line's indentation.
    """
    # Remove blank lines
    outline_sans_blank_lines = StringIO()
    for line in outline:
        if line.rstrip():
            outline_sans_blank_lines.write(line)
    outline_sans_blank_lines.seek(0)

    # Create a copy of the outline to be used for searches during the looping process
    # This will help us determine the correct bullet icon to use
    searchable_outline = StringIO(outline_sans_blank_lines.getvalue())

    # Format bullets
    output = ""
    sequences = BulletSequences(
    )  # Should allow us to handle an 'infinite' amount of asterisks
    # BUGFIX: num_of_periods was previously only assigned inside the '.'
    # branch, so a plain line appearing before any '.' line raised NameError.
    num_of_periods = 0
    for current_line in outline_sans_blank_lines:
        if current_line.startswith("*"):
            # Split the line separating the bullet identifier from the actual sentence
            asterisks, sentence = current_line.split(" ", 1)

            # Ensure our sequence holder is the correct length
            difference = len(asterisks) - sequences.num_of_sequences
            if difference > 0:
                sequences.add_sequences(difference)
            elif difference < 0:
                sequences.remove_sequences(difference)
            sequences.increment_sequence()

            # Assemble line
            output += ' '.join(
                ['.'.join([str(n) for n in sequences.sequences]), sentence])
        elif current_line.startswith("."):
            # Get indentation of next line
            searchable_outline.seek(outline_sans_blank_lines.tell())
            next_line = next((line for line in searchable_outline
                              if line.startswith('*') or line.startswith('.')),
                             None)
            if next_line and next_line.startswith("."):
                next_line_periods, _ = next_line.split(" ", 1)
            else:
                next_line_periods = ''

            # Get current line information
            periods, sentence = current_line.split(" ", 1)
            num_of_periods = len(periods)
            # Deeper next line => this bullet has children => ADDITION icon.
            bullet_icon = ADDITION if num_of_periods < len(
                next_line_periods) else MINUS

            # Assemble line
            output += f" {INDENTATION*num_of_periods}{bullet_icon} {sentence}"
        else:
            # Assemble line
            # Should inherit indentation from previous line
            output += f" {INDENTATION*num_of_periods} {current_line}"
    return output
def old_upload(self, fileobj, filename, description, license='', ignore=False,
               file_size=None):
    """Upload via the legacy Special:Upload form (pre-MediaWiki 1.16),
    retrying on server lag and HTTP errors."""
    image = self.Images[filename]
    if not image.can('upload'):
        raise errors.InsufficientPermission(filename)
    if image.exists and not ignore:
        raise errors.FileExists(filename)

    # Accept raw string content by wrapping it in a file-like object.
    if type(fileobj) is str:
        file_size = len(fileobj)
        fileobj = StringIO(fileobj)
    if file_size is None:
        # Measure the stream by seeking to the end, then rewind.
        fileobj.seek(0, 2)
        file_size = fileobj.tell()
        fileobj.seek(0, 0)

    predata = {}
    # Do this thing later so that an incomplete upload won't work
    # predata['wpDestFile'] = filename
    predata['wpUploadDescription'] = description
    predata['wpLicense'] = license
    if ignore:
        predata['wpIgnoreWarning'] = 'true'
    predata['wpUpload'] = 'Upload file'
    predata['wpSourceType'] = 'file'
    predata['wpDestFile'] = filename
    predata['wpEditToken'] = image.get_token('edit')

    postdata = upload.UploadFile('wpUploadFile', filename, file_size, fileobj,
                                 predata)

    wait_token = self.wait_token()
    while True:
        try:
            self.connection.post(self.host,
                                 '%sindex.php?title=Special:Upload&maxlag=%s'
                                 % (self.path, self.max_lag),
                                 data=postdata).read()
        except errors.HTTPStatusError as exc:
            e = exc.args if pythonver >= 3 else exc
            if e[0] == 503 and e[1].getheader('X-Database-Lag'):
                # Server lagging: honor its Retry-After hint.
                self.wait(wait_token, int(e[1].getheader('Retry-After')))
            elif e[0] < 500 or e[0] > 599:
                raise
            else:
                self.wait(wait_token)
        except errors.HTTPError:
            self.wait(wait_token)
        else:
            return
        # Rewind the payload before retrying.
        fileobj.seek(0, 0)
class EntryWriteStream(object):
    '''A file-like object for writing an entry to an archive.

    If the size is known ahead of time and provided, then the file contents
    are not buffered but flushed directly to the archive. If size is omitted,
    then the file contents are buffered and flushed in the close() method.'''
    def __init__(self, archive, pathname, size=None):
        self.archive = archive
        self.entry = Entry(pathname=pathname, mtime=time.time(), mode=stat.S_IFREG)
        if size is None:
            # Unknown size: buffer writes until close() can measure them.
            self.buffer = StringIO()
        else:
            # Known size: announce the entry now and stream data directly.
            self.buffer = None
            self.entry.size = size
            self.entry.to_archive(self.archive)
        self.bytes = 0
        self.closed = False

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __del__(self):
        self.close()

    def __len__(self):
        return self.bytes

    def tell(self):
        return self.bytes

    def write(self, data):
        if self.closed:
            raise Exception('Cannot write to closed stream.')
        if self.buffer:
            self.buffer.write(data)
        else:
            _libarchive.archive_write_data_from_str(self.archive._a, data)
        self.bytes += len(data)

    def close(self):
        if self.closed:
            return
        if self.buffer:
            # Buffered mode: the final position is the entry size.
            self.entry.size = self.buffer.tell()
            self.entry.to_archive(self.archive)
            _libarchive.archive_write_data_from_str(self.archive._a, self.buffer.getvalue())
        _libarchive.archive_write_finish_entry(self.archive._a)

        # Call archive.close() with _defer True to let it know we have been
        # closed and it is now safe to actually close.
        self.archive.close(_defer=True)
        self.archive = None
        self.closed = True
def list_directory(self, path):
    """Produce an HTML directory listing page for `path`, returning it as a
    rewound StringIO (or None on error)."""
    exchage_root = self.get_exchange_diretory()
    try:
        l = os.listdir(path)
    except os.error:
        self.send_error(404, "No permission to list directory")
        return None
    l.sort(key=lambda a: a.lower())
    # Build the page in memory so Content-Length can be computed.
    f = StringIO()
    displaypath = html.escape(urllib.parse.unquote(self.path))
    f.write('<!DOCTYPE html>')
    f.write(
        '<meta name="viewport" content="width=device-width" charset="utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no">'
    )
    f.write("<html>\n<title>内网传输</title>\n")
    f.write("<body>\n<h2>目录清单 位于%s</h2>\n" % displaypath)
    f.write("<hr>\n")
    f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
    f.write("<input name=\"file\" type=\"file\"/>")
    f.write("<input type=\"submit\" value=\"上传\"/>")
    f.write(
        "              "
    )
    f.write(
        "<input type=\"button\" value=\"主目录\" onClick=\"location='/'\">")
    f.write("</form>\n")
    f.write(
        '<h2 style="color:#FF0000">请先选择完文件再点上传,不这样做的话可能会出现奇怪的情况</h2><hr>\n<ul>\n'
    )
    for name in l:
        fullname = os.path.join(path, name)
        colorName = displayname = linkname = name
        # Directories get a trailing slash and a green highlight.
        if os.path.isdir(fullname):
            colorName = '<span style="background-color: #CEFFCE;">' + name + '/</span>'
            displayname = name
            linkname = name + "/"
        # Symlinks get an '@' marker and a pink highlight.
        if os.path.islink(fullname):
            colorName = '<span style="background-color: #FFBFFF;">' + name + '@</span>'
            displayname = name
        # print('xx'*30, path, name, fullname)
        # filename = os.path.join(exchage_root, os.path.realpath(path, exchage_root), displayname)
        # print('xx'*30, filename)
        f.write(
            '<table><tr><td width="60%%"><a href="%s">%s</a></td><td width="20%%">%s</td><td width="20%%">%s</td></tr>\n'
            % (urllib.parse.quote(linkname), colorName,
               Util.sizeof_fmt(os.path.getsize(fullname)),
               Util.modification_date(fullname)))
    f.write("</table>\n<hr>\n</body>\n</html>\n")
    # Position after the writes equals the body length; rewind for the caller.
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", str(length))
    self.end_headers()
    return f
def handle(self, *args, **options):
    """Send reminder emails for active conferences and open CfPs, then mail
    the organizers a summary of what was sent (if anything)."""
    for conference in Conference.objects.filter(active=True):
        # One transaction for each open conference that has registration
        # open. If registration isn't open then there is nowhere to
        # register, so don't even try.
        with transaction.atomic():
            # Reminder helpers append a description of each action here.
            whatstr = StringIO()

            if conference.registrationtype_set.filter(specialtype__in=('spk', 'spkr')).exists():
                self.remind_pending_speakers(whatstr, conference)
                self.remind_unregistered_speakers(whatstr, conference)

            self.remind_pending_registrations(whatstr, conference)
            self.remind_pending_multiregs(whatstr, conference)

            # Do we need to send a central mail?
            if whatstr.tell():
                # More than one character, so we have done something. Send
                # a report to the conference organizers about it.
                send_simple_mail(conference.notifyaddr,
                                 conference.notifyaddr,
                                 "Reminders sent",
                                 whatstr.getvalue(),
                                 sendername=conference.conferencename,
                                 receivername=conference.conferencename,
                                 )

    for conference in Conference.objects.filter(callforpapersopen=True):
        # One transaction for each conference with call for papers open, to send reminders
        # for things related to the cfp.
        with transaction.atomic():
            whatstr = StringIO()
            self.remind_empty_submissions(whatstr, conference)
            self.remind_empty_speakers(whatstr, conference)

            if whatstr.tell():
                send_simple_mail(conference.notifyaddr,
                                 conference.notifyaddr,
                                 "CfP reminders sent",
                                 whatstr.getvalue(),
                                 sendername=conference.conferencename,
                                 receivername=conference.conferencename,
                                 )
def generate():
    """Yield the query results serialized as one CSV payload."""
    sink = StringIO()
    csv_writer = csv.writer(sink)
    csv_writer.writerows(results)
    # The position after writing is the payload length.
    payload_len = sink.tell()
    sink.seek(0)
    payload = sink.read(payload_len)
    sink.seek(0)
    yield payload
def _bytes_from_io(
    stream: io.StringIO,
    name: str,
    mimetype: typing.Optional[str] = "text/x-python;charset=utf-8"
) -> hikari.Bytes:
    """Snapshot a StringIO's full contents into a hikari.Bytes, restoring the
    stream's position afterwards."""
    saved_position = stream.tell()
    stream.seek(0)
    contents = stream.read()
    stream.seek(saved_position)
    return hikari.Bytes(contents, name, mimetype=mimetype)
def list_directory(self, path):
    """Produce an HTML directory listing page for `path` as a StringIO
    (or None on error)."""
    try:
        file_list = os.listdir(path)
    except os.error:
        self.send_error(404, "No permission to list directory")
        return None
    file_list.sort(key=lambda a: a.lower())
    # Build the page in memory so Content-Length can be computed.
    f = StringIO()
    displaypath = html.escape(parse.unquote(self.path))
    f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
    f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
    f.write("<hr>\n")
    f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
    f.write("<input name=\"file\" type=\"file\"/>")
    # f.write("<input name=\"simtex\" type=\"text\"/>")
    f.write("<input type=\"submit\" value=\"upload\"/>")
    f.write(" ")
    f.write(
        "<input type=\"button\" value=\"HomePage\" onClick=\"location='/'\">"
    )
    f.write("</form>\n")
    f.write("<hr>\n<ul>\n")
    # print(self.path)
    # Offer a parent-directory link everywhere except the root.
    if len(self.path) > 1:
        file_list.insert(0, os.pardir)
    for name in file_list:
        fullname = os.path.join(path, name)
        colorName = displayname = linkname = name
        # Directories get a trailing slash and a green highlight.
        if os.path.isdir(fullname):
            colorName = '<span style="background-color: #CEFFCE;">' + name + '/</span>'
            displayname = name
            linkname = name + "/"
        # Symlinks get an '@' marker and a pink highlight.
        if os.path.islink(fullname):
            colorName = '<span style="background-color: #FFBFFF;">' + name + '@</span>'
            displayname = name
        filename = os.getcwd() + '/' + displaypath + displayname
        f.write(
            '<table><tr><td width="60%%"><a href="%s">%s</a></td><td width="20%%">%s</td><td width="20%%">%s</td></tr>\n'
            % (parse.quote(linkname), colorName,
               sizeof_fmt(os.path.getsize(filename)),
               modification_date(filename)))
    f.write("</table>\n<hr>\n</body>\n</html>\n")
    # html over
    length = f.tell()
    # f.seek(0)
    # NOTE(review): the rewind above is commented out, so the buffer is
    # returned positioned at its end — the caller presumably seeks before
    # reading, or nothing will be sent; confirm and consider restoring it.
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", str(length))
    self.end_headers()
    return f
def gen_headers(filename):
    """Generate a C header with prototypes for every `pub extern "C" fn`
    found in the Rust source file `filename`.

    The output file is only (re)written when should_regen() says it is stale
    and at least one exported function is found.
    """
    output_filename = make_output_filename(filename)
    if not should_regen(filename, output_filename):
        return

    # Read the whole source; close the handle promptly instead of leaking it.
    with open(filename) as source:
        buf = source.read()
    writer = StringIO()
    # BUGFIX: the character class previously read `[A_Za-z0-9_]`, which only
    # matched 'A', '_', 'Z' and lowercase — uppercase B-Y were silently
    # skipped. `A-Z` restores the full uppercase range.
    for fn in re.findall(
            r"^pub (unsafe )?extern \"C\" fn ([A-Za-z0-9_]+)\(([^{]+)?\)"
            r"(\s+-> ([^{]+))?",
            buf, re.M | re.DOTALL):
        args = []
        fnName = fn[1]
        # Translate each `name: RustType` parameter to a C declaration.
        for arg in fn[2].split(","):
            if not arg:
                continue
            arg_name, rs_type = arg.split(":", 1)
            arg_name = arg_name.strip()
            rs_type = rs_type.strip()
            c_type = convert_type(rs_type)
            if arg_name != "_":
                args.append("%s %s" % (c_type, arg_name))
            else:
                # Anonymous parameter: emit the type only.
                args.append(c_type)
        if not args:
            args.append("void")

        retType = fn[4].strip()
        if retType == "":
            returns = "void"
        else:
            returns = convert_type(retType)

        writer.write(u"%s %s(%s);\n" % (returns, fnName, ", ".join(args)))

    # Only write the header when at least one prototype was produced.
    if writer.tell() > 0:
        print("Writing %s" % (output_filename))
        if not os.path.exists(os.path.dirname(output_filename)):
            os.makedirs(os.path.dirname(output_filename))
        with open(output_filename, "w") as output:
            output.write(
                template % {
                    "prototypes": writer.getvalue(),
                    "name": os.path.basename(output_filename).replace(
                        "-", "_").replace(".", "_").upper()
                })
class WsgiMiddleware:
    """This middleware is to adapt a WSGI supported Python server
    framework into Azure Functions. It can be used by either calling the
    .handle() function or exposing the .main property in a HttpTrigger.
    """
    _logger = logging.getLogger('azure.functions.WsgiMiddleware')
    _usage_reported = False

    def __init__(self, app):
        """Instantiate a WSGI middleware to convert Azure Functions HTTP
        request into WSGI Python object.

        Example on handling WSGI app in a HTTP trigger by overwriting
        the .main() method:

        import azure.functions as func

        from FlaskApp import app

        main = func.WsgiMiddleware(app.wsgi_app).main
        """
        if not self._usage_reported:
            self._logger.info("Instantiating Azure Functions WSGI middleware.")
            self._usage_reported = True

        self._app = app
        # The WSGI app writes to this via environ['wsgi.errors'].
        self._wsgi_error_buffer = StringIO()
        self.main = self._handle

    def handle(self, req: HttpRequest, context: Optional[Context] = None):
        """Method to convert an Azure Functions HTTP request into a WSGI
        Python object.

        Example on handling WSGI app in a HTTP trigger by calling .handle()
        in .main() method:

        import azure.functions as func

        from FlaskApp import app

        def main(req, context):
          return func.WsgiMiddleware(app.wsgi_app).handle(req, context)
        """
        return self._handle(req, context)

    def _handle(self, req, context):
        # Translate the Functions request to a WSGI environ, run the app,
        # surface any wsgi.errors output, and translate the response back.
        wsgi_request = WsgiRequest(req, context)
        environ = wsgi_request.to_environ(self._wsgi_error_buffer)
        wsgi_response = WsgiResponse.from_app(self._app, environ)
        self._handle_errors()
        return wsgi_response.to_func_response()

    def _handle_errors(self):
        # Raise anything the WSGI app wrote to wsgi.errors as an exception.
        if self._wsgi_error_buffer.tell() > 0:
            self._wsgi_error_buffer.seek(0)
            # BUGFIX: the original joined readline() — a single string —
            # which interleaved linesep between individual CHARACTERS of
            # the first line. Join the list of lines instead.
            error_message = linesep.join(
                self._wsgi_error_buffer.readlines())
            raise Exception(error_message)
def test_01():
    """Check that write() advances the stream position and getvalue()
    hands back exactly the written text."""
    sample = 'Test String 1'
    buffer = StringIO()
    buffer.write(sample)
    # The position must sit right after the written text.
    assert buffer.tell() == len(sample)
    # And the full contents must round-trip unchanged.
    assert buffer.getvalue() == sample
def parse_content_blocks(content: str) -> List[ContentBlock]:
    """Split *content* into plain and backtick-fenced preformatted blocks.

    A run of one or more backticks opens a preformatted block; it closes on
    a run of at least as many backticks.  Raises ValueError when a fenced
    block is never terminated.
    """
    blocks = []
    state = 'open'      # one of 'open' | 'codestart' | 'code'
    quotemult = 0       # backtick count that opened the current fence
    idx = 0
    buffer = StringIO()
    while idx < len(content):
        c = content[idx]
        incr = 1
        if c == '`':
            if state == 'open':
                # Measure the whole run of backticks; its length fixes the
                # number of ticks required to close the fence.
                tick_mult = _substr_cond(content, idx, lambda _, y: y == '`')
                quotemult = len(tick_mult)
                state = 'codestart'
                incr = quotemult
                # Flush accumulated plain text before entering the fence.
                if buffer.tell() > 0:
                    blocks.append(ContentBlock(content=buffer.getvalue()))
                    buffer = StringIO()
            elif state == 'code':
                tick_mult = _substr_cond(content, idx, lambda _, y: y == '`')
                ltick = len(tick_mult)
                # If the number of ticks in a row is equal to or larger than the initial
                # number of ticks. Assume that the open block is now closed
                # with the initial number of ticks, and if there are any
                # more ticks remaining, they will get handled in the next
                # loop iteration.
                incr = ltick if ltick < quotemult else quotemult
                if incr >= quotemult:
                    state = 'open'
                    # Preformatted content blocks can never be empty
                    # this would otherwise be an unterminated block
                    blocks.append(
                        PreformattedContentBlock(
                            content=buffer.getvalue(),
                            wrapping_tick_count=quotemult))
                    buffer = StringIO()
                    quotemult = 0
                else:
                    # NOTE(review): writes one backtick but advances past the
                    # whole shorter run (incr == ltick) — confirm shorter tick
                    # runs are not meant to be preserved verbatim.
                    buffer.write(c)
        else:
            if state == 'codestart':
                # First non-tick character after the fence: now inside code.
                state = 'code'
            buffer.write(c)
        idx += incr
    if state == 'code' or state == 'codestart':
        raise ValueError('Unterminated preformatted content block.')
    final_block = buffer.getvalue()
    if final_block:
        # Trailing plain text after the last fence.
        blocks.append(ContentBlock(content=final_block))
    return blocks
def test_ifdef(self):
    """Exercise CPreProcessor conditional (#ifdef/#else/#endif) handling."""
    source = StringIO('#ifdef MY_DEFINE\nPPVAR is set to MY_DEFINE\n'
                      '#else\nMY_DEFINE is not set\n#endif\n')
    # Without any defines, the #else branch must be emitted.
    preprocessor = CPreProcessor(source)
    self.assertEqual(preprocessor.read().strip(), 'MY_DEFINE is not set')
    # Rewind; both the raw stream and the preprocessor report position 0.
    source.seek(0)
    self.assertEqual(source.tell(), 0)
    self.assertEqual(preprocessor.tell(), 0)
    # With MY_DEFINE defined, the #ifdef branch is taken and substituted.
    preprocessor = CPreProcessor(source, defines=dict(MY_DEFINE='SUCCESS'))
    self.assertEqual(preprocessor.read().strip(), 'PPVAR is set to SUCCESS')
class StringBufferStream(UnpackStream):
    """Unpack stream backed by an in-memory string buffer."""

    def __init__(self, buff, *args, **kwargs):
        super(StringBufferStream, self).__init__(*args, **kwargs)
        self._buff = StringIO(buff)
        self._length = len(buff)

    def read(self, amount):
        """Return up to *amount* characters from the buffer."""
        return self._buff.read(amount)

    def __len__(self):
        """Number of characters not yet consumed."""
        remaining = self._length - self._buff.tell()
        return remaining
def approximate_wkb(wkb_in):
    '''Return an approximation of the input WKB with lower-precision geometry.'''
    reader = StringIO(wkb_in)
    writer = StringIO()
    approx_geometry(reader, writer)
    wkb_out = writer.getvalue()
    # Sanity checks: every input byte consumed, same-length output produced.
    assert len(wkb_in) == reader.tell(), 'The whole WKB was not processed'
    assert len(wkb_in) == len(wkb_out), 'The output WKB is the wrong length'
    return wkb_out
def gen_headers(filename):
    """Generate a C header with prototypes for every ``pub extern "C" fn``
    found in the Rust source *filename*.

    Skips work when should_regen() reports the output is current, and
    writes nothing when no exported functions are found.
    """
    output_filename = make_output_filename(filename)
    if not should_regen(filename, output_filename):
        return
    # Close the source file promptly instead of leaking the handle.
    with open(filename) as source:
        buf = source.read()
    writer = StringIO()
    # Match: pub [unsafe] extern "C" fn NAME(ARGS) [-> RET]
    # BUG FIX: the original class [A_Za-z0-9_] only matched 'A', '_', 'Z',
    # lowercase and digits — functions containing B-Y were skipped.
    for fn in re.findall(
            r"^pub (unsafe )?extern \"C\" fn ([A-Za-z0-9_]+)\(([^{]+)?\)"
            r"(\s+-> ([^{]+))?",
            buf, re.M | re.DOTALL):
        args = []
        fnName = fn[1]
        for arg in fn[2].split(","):
            if not arg:
                continue
            arg_name, rs_type = arg.split(":", 1)
            arg_name = arg_name.strip()
            rs_type = rs_type.strip()
            c_type = convert_type(rs_type)
            if arg_name != "_":
                args.append("%s %s" % (c_type, arg_name))
            else:
                # Anonymous Rust parameter: emit the bare C type.
                args.append(c_type)
        if not args:
            args.append("void")
        retType = fn[4].strip()
        if retType == "":
            returns = "void"
        else:
            returns = convert_type(retType)
        writer.write(u"%s %s(%s);\n" % (returns, fnName, ", ".join(args)))
    if writer.tell() > 0:
        print("Writing %s" % (output_filename))
        out_dir = os.path.dirname(output_filename)
        # Guard against an empty dirname (output in the current directory),
        # where os.makedirs('') would raise.
        if out_dir and not os.path.exists(out_dir):
            os.makedirs(out_dir)
        with open(output_filename, "w") as output:
            output.write(template % {
                "prototypes": writer.getvalue(),
                "name": os.path.basename(output_filename).replace(
                    "-", "_").replace(".", "_").upper()
            })
class StringIO(object):
    """Thin delegating wrapper around a StringIO-like object.

    Wraps *stringio* when supplied, otherwise a fresh WrappedStringIO,
    and forwards the standard file-object protocol to it unchanged.
    """

    def __init__(self, stringio=None):
        self.encoding = None
        if stringio is None:
            stringio = WrappedStringIO()
        self.stringio_object = stringio

    # -- lifecycle ----------------------------------------------------
    def close(self):
        return self.stringio_object.close()

    def closed(self, x):
        return self.stringio_object.closed(x)

    def flush(self):
        return self.stringio_object.flush()

    # -- reading ------------------------------------------------------
    def __next__(self):
        return next(self.stringio_object)

    def read(self, s=None):
        return self.stringio_object.read(s)

    def readline(self):
        return self.stringio_object.readline()

    def readlines(self):
        return self.stringio_object.readlines()

    # -- writing ------------------------------------------------------
    def write(self, s):
        return self.stringio_object.write(s)

    def writelines(self, sequence_of_strings):
        return self.stringio_object.writelines(sequence_of_strings)

    def truncate(self):
        return self.stringio_object.truncate()

    # -- positioning and misc -----------------------------------------
    def getvalue(self, use_pos=None):
        return self.stringio_object.getvalue(use_pos)

    def isatty(self):
        return self.stringio_object.isatty()

    def reset(self):
        return self.stringio_object.reset()

    def seek(self, position):
        return self.stringio_object.seek(position)

    def softspace(self, x, base=None):
        return self.stringio_object.softspace(x, base)

    def tell(self):
        return self.stringio_object.tell()
def test_reader_w_exception_and_test_compound(self):
    stream_a = StringIO(u'1\n2\n3\n4')
    stream_b = StringIO(u'2\n3\n4\n5')

    @self.module.register_sniffer('format1')
    def f1_sniffer(fh):
        return '1' in fh.readline(), {}

    @self.module.register_sniffer('format2')
    def f2_sniffer(fh):
        return '2' in fh.readline(), {}

    @self.module.register_reader('format1', TestClass)
    def f1_reader(fh):
        fh.read()
        raise TestingUtilError("File position should be reset now.")

    @self.module.register_reader('format1, format2', TestClass)
    def f1_f2_reader(fh1, fh2):
        fh1.read()
        fh2.read()
        raise TestingUtilError("File position should be reset now.")

    # Single-file read: the reader raises after consuming the stream and
    # the registry must restore the position it started from.
    stream_a.seek(0)
    with self.assertRaises(TestingUtilError):
        self.module.read(stream_a, format='format1', into=TestClass)
    self.assertEqual(0, stream_a.tell())

    # Compound read: both file positions must be restored on failure.
    stream_a.seek(0)
    stream_b.seek(0)
    with self.assertRaises(TestingUtilError):
        self.module.read([stream_a, stream_b], format='format1, format2',
                         into=TestClass)
    self.assertEqual(0, stream_a.tell())
    self.assertEqual(0, stream_b.tell())

    stream_a.close()
    stream_b.close()
def test_reader_into_none_w_exception_and_test_compound(self):
    stream_a = StringIO(u'1\n2\n3\n4')
    stream_b = StringIO(u'2\n3\n4\n5')

    @self.module.register_sniffer('format1')
    def f1_sniffer(fh):
        return '1' in fh.readline(), {}

    @self.module.register_sniffer('format2')
    def f2_sniffer(fh):
        return '2' in fh.readline(), {}

    @self.module.register_reader('format1')
    def f1_reader(fh):
        fh.read()
        raise TestingUtilError("File position should be reset now.")
        yield

    @self.module.register_reader('format1, format2')
    def f1_f2_reader(fh1, fh2):
        fh1.read()
        fh2.read()
        raise TestingUtilError("File position should be reset now.")
        yield

    # Generator-based reader with no target class: the file position must
    # be restored when the generator raises.
    stream_a.seek(0)
    with self.assertRaises(TestingUtilError):
        next(self.module.read(stream_a, format='format1'))
    self.assertEqual(0, stream_a.tell())

    # Same guarantee for a compound format across both files.
    stream_a.seek(0)
    stream_b.seek(0)
    with self.assertRaises(TestingUtilError):
        next(self.module.read([stream_a, stream_b],
                              format='format1, format2'))
    self.assertEqual(0, stream_a.tell())
    self.assertEqual(0, stream_b.tell())

    stream_a.close()
    stream_b.close()
def test_w_single_valid_file(self):
    from io import StringIO
    from ..._compat import TEXT
    VALID = TEXT('CODE PID ID 123.45 DESC')
    stream = StringIO(VALID)
    code, pid, id_, fromepoch, desc = self._callFUT([stream])
    # Each whitespace-separated field lands in the matching return slot.
    self.assertEqual(code, TEXT('CODE'))
    self.assertEqual(pid, TEXT('PID'))
    self.assertEqual(id_, TEXT('ID'))
    self.assertEqual(fromepoch, 123.45)
    self.assertEqual(desc, TEXT('DESC'))
    # The parser must consume the entire buffer.
    self.assertEqual(stream.tell(), len(VALID))
class MockProcess:
    """Minimal stand-in for a subprocess handle exposing stderr and poll()."""

    def __init__(self, err_lines, returncode):
        text = ''.join(err_lines)
        self.__err_len = len(text)
        self.stderr = StringIO(text)
        self.__rc = returncode
        self.returncode = None

    def poll(self):
        """Report the exit code once all of stderr has been consumed,
        None while output remains — mirroring Popen.poll()."""
        exhausted = self.stderr.tell() == self.__err_len
        if exhausted:
            self.returncode = self.__rc
        return self.returncode
def list_directory(self, path):
    """Helper to produce a directory listing (absent index.html).

    Return value is either a file object, or None (indicating an
    error).  In either case, the headers are sent, making the
    interface the same as for send_head().
    """
    try:
        list = os.listdir(path)
    except os.error:
        self.send_error(404, "No permission to list directory")
        return None
    # Case-insensitive name sort first, then newest-modified first; the
    # second sort is stable, so names only break ties between equal mtimes.
    list.sort(key=lambda a: a.lower())
    list.sort(key=lambda x: os.path.getmtime(os.path.join(path, x)),
              reverse=True)
    f = StringIO()
    displaypath = cgi.escape(urllib.parse.unquote(self.path))
    f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
    f.write("<body>\n<h1>Directory listing for %s</h1>\n" % displaypath)
    f.write(
        '<table><tr><th>Name</th><th>Date Modified</th><th>Size</th></tr><tr><th colspan="3"><hr></th></tr>\n'
    )
    for name in list:
        fullname = os.path.join(path, name)
        displayname = linkname = name
        # Append / for directories or @ for symbolic links
        if os.path.isdir(fullname):
            displayname = name + "/"
            linkname = name + "/"
        if os.path.islink(fullname):
            displayname = name + "@"
            # Note: a link to a directory displays with @ and links with /
        filename = os.path.join(path, name)
        modified = time.strftime(
            '%Y-%m-%d %H:%M:%S',
            time.localtime(os.path.getmtime(filename)))
        size = self.sizeof_fmt(self.get_size(filename))
        f.write(
            '<tr><td><a href="%s">%s</a></td><td>%s</td><td>%s</td>\n'
            % (urllib.parse.quote(linkname), cgi.escape(displayname),
               cgi.escape(modified), cgi.escape(size)))
    f.write("</table>\n<hr>\n</body>\n</html>\n")
    # tell() after the writes gives the body length; rewind so the caller
    # can stream the buffer back out.
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    encoding = sys.getfilesystemencoding()
    self.send_header("Content-type", "text/html; charset=%s" % encoding)
    self.send_header("Content-Length", str(length))
    self.end_headers()
    return f
def test_fieldnames_attribute(self):
    # Consuming a row first: fieldnames becomes available afterwards.
    stream = StringIO("a,b,c\r\n1,2,3\r\n4,5,6\r\n")
    reader = FancyReader(stream)
    self.assertEqual(stream.tell(), 0)
    self.assertEqual(tuple(next(reader)), ('1', '2', '3'))
    self.assertEqual(reader.fieldnames, ['a', 'b', 'c'])

    # Passing fieldnames explicitly must not touch the file at all.
    stream = StringIO("a,b,c\r\n1,2,3\r\n4,5,6\r\n")
    reader = FancyReader(stream, fieldnames=['a', 'b', 'c'])
    self.assertEqual(stream.tell(), 0)
    self.assertEqual(reader.fieldnames, ['a', 'b', 'c'])
    self.assertEqual(stream.tell(), 0)

    # Accessing fieldnames first reads exactly the header line (7 chars).
    stream = StringIO("a,b,c\r\n1,2,3\r\n4,5,6\r\n")
    reader = FancyReader(stream)
    self.assertEqual(stream.tell(), 0)
    self.assertEqual(reader.fieldnames, ['a', 'b', 'c'])
    self.assertEqual(stream.tell(), 7)
def decode(self, data):
    """Run *data* through the enigma, undo the X-escape encoding, pad to a
    multiple of eight characters, and base32-decode the result."""
    raw = self.enigma.encode(data)
    unescaped = StringIO()
    awaiting_escape = False
    for symbol in raw:
        if awaiting_escape:
            # Previous "X" marked an escape: map the symbol back if known.
            if symbol in unescape:
                symbol = unescape[symbol]
            unescaped.write(symbol)
            awaiting_escape = False
        elif symbol == "X":
            awaiting_escape = True
        else:
            unescaped.write(symbol)
    # Base32 input must be '='-padded to a multiple of eight characters.
    if unescaped.tell() % 8 != 0:
        padding = 8 - unescaped.tell() % 8
        unescaped.write("=" * padding)
    return base64.b32decode(unescaped.getvalue())
def test_into_is_none_compound_format(self):
    odd_stream = StringIO(u'1\n3')
    even_stream = StringIO(u'2\n4')

    @self.module.register_reader(['odd', 'even'])
    def reader(odd, even):
        for o, e in zip(odd, even):
            yield int(o.rstrip('\n'))
            yield int(e.rstrip('\n'))

    # Interleaved read in (odd, even) order: perturb both positions during
    # iteration and confirm the generator leaves them where we put them.
    generator = self.module.read([odd_stream, even_stream],
                                 format='odd, even')
    first_pass = True
    for got, expected in zip(generator, [1, 2, 3, 4]):
        if first_pass:
            odd_stream.seek(3)
            even_stream.seek(2)
            first_pass = False
        self.assertEqual(got, expected)
    self.assertEqual(3, odd_stream.tell())
    self.assertEqual(2, even_stream.tell())

    # Same readers, opposite argument order.
    even_stream.seek(0)
    odd_stream.seek(0)
    generator = self.module.read([even_stream, odd_stream],
                                 format='even, odd')
    first_pass = True
    for got, expected in zip(generator, [1, 2, 3, 4]):
        if first_pass:
            odd_stream.seek(5)
            even_stream.seek(1)
            first_pass = False
        self.assertEqual(got, expected)
    self.assertEqual(5, odd_stream.tell())
    self.assertEqual(1, even_stream.tell())

    # A compound format needs one file per component.
    with self.assertRaises(ValueError):
        self.module.read([odd_stream], format='even, odd')

    odd_stream.close()
    even_stream.close()
def _initialize(self):
    # Generator: emit an optional CSV header row, then stream each upstream
    # record as one serialized CSV line.
    string_out = StringIO()
    writer = csv.writer(string_out, delimiter=self.delimiter,
                        lineterminator=self.lineterminator,
                        dialect=self.dialect,
                        **self.kwargs)
    try:
        # Header row built from self.field, when that attribute exists.
        writer.writerow(self.field)
        string_out.seek(0)
        yield string_out.read()
    except AttributeError as e:
        # No self.field configured: deliberately skip the header
        # (best-effort, not an error).
        pass
    for x in self.upstream:
        if string_out.tell() > 256 * 1024 * 1024:
            # Buffer has grown past 256 MiB: discard it and start fresh so
            # memory stays bounded over a long stream.
            string_out.close()
            string_out = StringIO()
            writer = csv.writer(string_out, delimiter=self.delimiter,
                                lineterminator=self.lineterminator,
                                dialect=self.dialect,
                                **self.kwargs)
        # Remember where this row starts, write it, then seek back so the
        # read() below returns only the newly written characters.
        pos = string_out.tell()
        writer.writerow(x)
        string_out.seek(pos)
        yield string_out.read()
def test_sniff_reset():
    stream = StringIO()
    stream.write("We shouldn't see this\n")
    start = stream.tell()
    gold = "#This is the gold!"
    stream.write(gold + "\n")
    # Pad with a random number of comment-only lines plus one token line.
    for _ in range(random.randint(0, 3)):
        stream.write('#\n')
    stream.write("a a\n")
    stream.seek(start)
    _ = _sniff_conll_file(stream)
    # Sniffing must rewind the file to where it started reading.
    line = stream.readline().rstrip()
    assert line == gold
def list_directory(self, path):
    """Helper to produce a directory listing (absent index.html).

    Return value is either a file object, or None (indicating an
    error).  In either case, the headers are sent, making the
    interface the same as for send_head().
    """
    try:
        list = os.listdir(path)
    except os.error:
        self.send_error(404, "No permission to list directory")
        return None
    # Case-insensitive alphabetical listing.
    list.sort(key=lambda a: a.lower())
    f = StringIO()
    displaypath = cgi.escape(urllib.parse.unquote(self.path))
    f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
    f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
    f.write("<hr>\n")
    # Inline multipart upload form posted back to this handler.
    f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
    f.write("<input name=\"file\" type=\"file\"/>")
    f.write("<input type=\"submit\" value=\"upload\"/>")
    f.write("              ")
    f.write("<input type=\"button\" value=\"HomePage\" onClick=\"location='/'\">")
    f.write("</form>\n")
    f.write("<hr>\n<ul>\n")
    for name in list:
        fullname = os.path.join(path, name)
        colorName = displayname = linkname = name
        # Append / for directories or @ for symbolic links
        if os.path.isdir(fullname):
            colorName = '<span style="background-color: #CEFFCE;">' + name + '/</span>'
            displayname = name
            linkname = name + "/"
        if os.path.islink(fullname):
            colorName = '<span style="background-color: #FFBFFF;">' + name + '@</span>'
            displayname = name
            # Note: a link to a directory displays with @ and links with /
        # NOTE(review): builds a filesystem path from the HTML-escaped URL
        # path — looks fragile for names that need escaping; confirm against
        # the handler's routing before changing.
        filename = os.getcwd() + '/' + displaypath + displayname
        f.write('<table><tr><td width="60%%"><a href="%s">%s</a></td><td width="20%%">%s</td><td width="20%%">%s</td></tr>\n'
                % (urllib.parse.quote(linkname), colorName,
                   sizeof_fmt(os.path.getsize(filename)),
                   modification_date(filename)))
    f.write("</table>\n<hr>\n</body>\n</html>\n")
    # tell() after the writes gives the body length; rewind for streaming.
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", str(length))
    self.end_headers()
    return f
class GzipResponse(object):
    # WSGI response wrapper: buffers the application's output and
    # gzip-compresses it when the content type looks compressible.

    def __init__(self, start_response, compress_level):
        self.start_response = start_response
        self.compress_level = compress_level
        self.buffer = StringIO()
        self.compressible = False
        self.content_length = None

    def gzip_start_response(self, status, headers, exc_info=None):
        # Decide compressibility from the response headers: only text/* and
        # application/* types without 'zip' in them, and with no existing
        # content-encoding, are compressed.
        self.headers = headers
        ct = header_value(headers,'content-type')
        ce = header_value(headers,'content-encoding')
        self.compressible = False
        if ct and (ct.startswith('text/') or ct.startswith('application/')) \
           and 'zip' not in ct:
            self.compressible = True
        if ce:
            self.compressible = False
        if self.compressible:
            headers.append(('content-encoding', 'gzip'))
        # The original content-length is wrong once the body is re-encoded;
        # finish_response() sets the real value.
        remove_header(headers, 'content-length')
        self.headers = headers
        self.status = status
        # Per WSGI, start_response returns a write callable; writes land in
        # the buffer and are flushed by finish_response().
        return self.buffer.write

    def write(self):
        # Drain the buffer into a single-item iterable and close it.
        out = self.buffer
        out.seek(0)
        s = out.getvalue()
        out.close()
        return [s]

    def finish_response(self, app_iter):
        # Funnel the app's output through gzip when compressible, then fix
        # the content-length and invoke the real start_response.
        if self.compressible:
            output = gzip.GzipFile(mode='wb',
                                   compresslevel=self.compress_level,
                                   fileobj=self.buffer)
        else:
            output = self.buffer
        try:
            for s in app_iter:
                output.write(s)
            if self.compressible:
                # Flush gzip trailer into the buffer.
                output.close()
        finally:
            if hasattr(app_iter, 'close'):
                app_iter.close()
        content_length = self.buffer.tell()
        CONTENT_LENGTH.update(self.headers, content_length)
        self.start_response(self.status, self.headers)
def test_into_is_none(self):
    stream = StringIO(u'1\n2\n3\n4')

    @self.module.register_reader('format')
    def reader(fh):
        for value in [int(x) for x in fh.read().split('\n')]:
            yield value

    produced = self.module.read(stream, format='format')
    moved = False
    for got, expected in zip(produced, [1, 2, 3, 4]):
        if not moved:
            # Move the file pointer mid-iteration; the generator must not
            # disturb the caller-controlled position.
            stream.seek(3)
            moved = True
        self.assertEqual(got, expected)
    self.assertEqual(3, stream.tell())
    stream.close()
def old_upload(self, file, filename, description, license = '',
               ignore = False, file_size = None):
    # Legacy Special:Upload-based file upload (pre-API path).
    image = self.Images[filename]
    if not image.can('upload'):
        raise errors.InsufficientPermission(filename)
    if image.exists and not ignore:
        raise errors.FileExists(filename)

    # Accept raw string data as well as file-like objects.
    if type(file) is str:
        file_size = len(file)
        file = StringIO(file)
    if file_size is None:
        # Determine size by seeking to the end, then rewind.
        file.seek(0, 2)
        file_size = file.tell()
        file.seek(0, 0)

    predata = {}
    # Do this thing later so that an incomplete upload won't work
    # predata['wpDestFile'] = filename
    predata['wpUploadDescription'] = description
    predata['wpLicense'] = license
    if ignore:
        predata['wpIgnoreWarning'] = 'true'
    predata['wpUpload'] = 'Upload file'
    predata['wpSourceType'] = 'file'
    predata['wpDestFile'] = filename
    predata['wpEditToken'] = image.get_token('edit')

    postdata = upload.UploadFile('wpUploadFile', filename, file_size,
                                 file, predata)

    wait_token = self.wait_token()
    while True:
        try:
            self.connection.post(
                self.host,
                self.path + 'index.php?title=Special:Upload&maxlag='
                + self.max_lag, data = postdata).read()
        except errors.HTTPStatusError as e:
            # NOTE(review): py2-style exception indexing (e[0]/e[1]);
            # confirm the errors module still supports it.
            if e[0] == 503 and e[1].getheader('X-Database-Lag'):
                # Database lag: honour the server's Retry-After hint.
                self.wait(wait_token, int(e[1].getheader('Retry-After')))
            elif e[0] < 500 or e[0] > 599:
                # Non-5xx errors are fatal, not retryable.
                raise
            else:
                self.wait(wait_token)
        except errors.HTTPError:
            self.wait(wait_token)
        else:
            return
        # Rewind the source so the retried POST re-reads it from the start.
        file.seek(0, 0)
def list_directory(self):
    """Render self.filemap as a simple HTML index, send the response
    headers, and return the rewound buffer for streaming."""
    page = StringIO()
    page.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
    page.write("<html>\n<title>Directory listing</title>\n")
    page.write("<body>\n<h2>Directory listing</h2>\n")
    page.write("<hr>\n<ul>\n")
    # One list entry per served path; link text equals the path itself.
    for path in self.filemap:
        page.write('<li><a href="%s">%s</a>\n' % (path, path))
    page.write("</ul>\n<hr>\n</body>\n</html>\n")
    length = page.tell()
    page.seek(0)
    self.send_response(200)
    encoding = sys.getfilesystemencoding()
    self.send_header("Content-type", "text/html; charset=%s" % encoding)
    self.send_header("Content-Length", str(length))
    self.end_headers()
    return page