def scan(path):
    """Perform an in-process binary module scan.

    The module is loaded (imported) into the current Python interpreter.

    "path" - a path to a binary module to scan

    Returns a CIX 2.0 XML string.
    """
    from gencix.python import gencixcore as gencix

    name, _ = os.path.splitext(os.path.basename(path))
    # Renamed from `dir`, which shadowed the `dir()` builtin.
    mod_dir = os.path.dirname(path)
    root = gencix.Element('codeintel', version='2.0', name=name)
    gencix.docmodule(name, root, usefile=True, dir=mod_dir)
    gencix.perform_smart_analysis(root)
    gencix.prettify(root)
    tree = gencix.ElementTree(root)
    stream = StringIO()
    try:
        stream.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        tree.write(stream)
        return stream.getvalue()
    finally:
        stream.close()
def generate_roles_data_from_directory(directory, roles, validate=True):
    """Generate a roles data file using roles from a local path

    :param directory local filesystem path to the roles
    :param roles ordered list of roles
    :param validate validate the metadata format in the role yaml files
    :returns string contents of the roles_data.yaml
    """
    check_role_exists(get_roles_list_from_directory(directory), roles)
    buf = StringIO()
    banner = ["#" * 79, "# File generated by TripleO", "#" * 79, ""]
    buf.write("\n".join(banner))
    for role in roles:
        parts = role.split(':')
        defined_role = parts[0]
        role_file = os.path.join(directory, "{}.yaml".format(defined_role))
        if validate:
            validate_role_yaml(role_path=role_file)
        with open(role_file, "r") as src:
            if len(parts) > 1:
                # "defined:generated" syntax: rewrite the role under a new name.
                buf.write(generate_role_with_colon_format(src.read(),
                                                          defined_role,
                                                          parts[1]))
            else:
                shutil.copyfileobj(src, buf)
    return buf.getvalue()
class CagedPrompt(code.InteractiveConsole):
    """An interactive console driven by a canned script instead of a user.

    Input lines come from the string handed to :meth:`run`; everything the
    console would print (banner, prompts, results) is captured in
    ``self.output``.
    """

    def __init__(self):
        env = {'__name__': '__main__'}
        code.InteractiveConsole.__init__(self, env)

    def run(self, input, banner=True):
        """Replay *input* through the console, capturing output.

        :param input: multi-line string of Python statements
        :param banner: when True, emit a short "Python <version>" banner
        """
        self.out = StringIO()
        self.inlines = textwrap.dedent(input).splitlines()
        if banner:
            banner_text = "Python " + sys.version.split("[")[0]
        else:
            banner_text = ""
        old_stdout = sys.stdout
        sys.stdout = self.out
        try:
            self.interact(banner_text, exitmsg="")
        finally:
            # Bug fix: restore stdout even if interact() raises, so a
            # failure here cannot leave the whole process writing into
            # our private buffer.
            sys.stdout = old_stdout
        self.output = self.out.getvalue()

    def raw_input(self, prompt):
        # Feed the next canned line; EOFError ends interact().
        try:
            line = self.inlines.pop(0)
        except IndexError:
            raise EOFError
        if line or prompt == sys.ps2:
            self.write("%s%s\n" % (prompt, line))
        else:
            self.write("\n")
        return line

    def write(self, data):
        # interact() routes banners/tracebacks through here.
        self.out.write(data)
def _decompress_xz(filename):
    """Emulates an open function in read mode for xz.

    See the comment in _compress_xz for more information.

    This function tries to emulate the lzma module as much as possible
    """
    if not filename.endswith('.xz'):
        filename = '{}.xz'.format(filename)
    try:
        with open(os.devnull, 'w') as null:
            string = subprocess.check_output(
                ['xz', '--decompress', '--stdout', filename], stderr=null)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise exceptions.PiglitFatalError('No xz binary available')
        raise

    # We need a file-like object, so the contents must be placed in
    # a StringIO object.  (Renamed from `io`, which shadowed the io module.)
    buf = StringIO()
    buf.write(string)
    buf.seek(0)
    try:
        yield buf
    finally:
        # Bug fix: the old `yield io; io.close()` never closed the buffer
        # when the consumer raised or abandoned the generator; finally
        # guarantees cleanup on GeneratorExit as well.
        buf.close()
class _MiniPPrinter(object):
    """Tiny stand-in for IPython's pretty-printer protocol.

    Implements just enough of the interface (`text`, `breakable`,
    `begin_group`, `end_group`, `pretty`) for objects that define
    ``_repr_pretty_`` to render themselves into a string buffer.
    """

    def __init__(self):
        self._out = StringIO()
        self.indentation = 0

    def text(self, text):
        self._out.write(text)

    def breakable(self, sep=" "):
        # No line-wrapping in the mini printer: a breakable is just a sep.
        self._out.write(sep)

    def begin_group(self, _, text):
        self.text(text)

    def end_group(self, _, text):
        self.text(text)

    def pretty(self, obj):
        # Prefer the object's own pretty hook, falling back to repr().
        hook = getattr(obj, "_repr_pretty_", None)
        if hook is not None:
            hook(self, False)
        else:
            self.text(repr(obj))

    def getvalue(self):
        return self._out.getvalue()
def __str__(self):
    """Render the container as one order per line."""
    from six.moves import cStringIO as StringIO
    buf = StringIO()
    for order in self:
        buf.write("%s\n" % str(order))
    return buf.getvalue()
def _parse_operator(segment, iterator):
    """Parses the operator (eg. '==' or '<').

    Consumes characters from `iterator` until the accumulated text can no
    longer be a (prefix of a) known operator, records the operator on
    `segment`, and returns an iterator over the unconsumed characters.

    NOTE(review): if `iterator` is exhausted without ever yielding a
    character, the final `chain(character, iterator)` would raise
    NameError — presumably callers guarantee at least one character;
    verify against the caller.
    """
    stream = StringIO()
    for character in iterator:
        if character == constants.NEGATION[1]:
            if stream.tell():
                # Negation can only occur at the start of an operator.
                raise ValueError('Unexpected negation.')

            # We've been negated; a second negation toggles back.
            segment.negated = not segment.negated
            continue

        if (stream.getvalue() + character not in OPERATOR_SYMBOL_MAP and
                stream.getvalue() + character not in OPERATOR_BEGIN_CHARS):
            # We're no longer an operator; stop and hand back the rest.
            break

        # Expand the operator
        stream.write(character)

    # Check for existence of the accumulated operator.
    text = stream.getvalue()
    if text not in OPERATOR_SYMBOL_MAP:
        # Doesn't exist because of a mis-placed negation in the middle
        # of the path.
        raise ValueError('Unexpected negation.')

    # Set the found operator.
    segment.operator = OPERATOR_SYMBOL_MAP[text]

    # Return the remaining characters (the char that ended the loop is
    # chained back in front of the iterator; chaining a 1-char string
    # yields that one character).
    return chain(character, iterator)
def input_items(input):
    """Yield (xml-root-element, position) pairs from a stream of
    concatenated XML documents, splitting on '<?xml ' declaration lines."""

    def _finish(buf):
        # Parse the accumulated document; on XML errors warn and yield None.
        buf.seek(0, 0)
        root = None
        try:
            root = ElementTree.parse(buf).getroot()
        except xml_error as e:
            root = None
            warn("ignoring XML error: %s" % e)
        buf.close()
        return root

    buf = None
    bufpos = None
    for (line, linepos) in lines_positions(input):
        if line.startswith('<?xml '):
            # A new document starts: flush the previous one, if any.
            if buf is not None:
                yield (_finish(buf), bufpos)
            buf = StringIO()
            bufpos = None
        elif buf:
            # this lets us start anywhere and pick up the next record
            if bufpos is None:
                bufpos = linepos
            buf.write(line)
    if buf is not None:
        yield (_finish(buf), bufpos)
def bind_config(self):
    """
    Return the BIND configuration for this record.
    """
    out = StringIO()
    for value in self.values:
        ttl = 60 if self.ttl is None else self.ttl
        out.write("%-63s %7d IN %-7s " % (self.name, ttl, self.record_type))
        if self.record_type == "SOA":
            fields = value.split()
            # ns, email, then the serial/refresh/... numbers in parens.
            out.write("%s %s (%s)" % (fields[0], fields[1],
                                      " ".join(fields[2:])))
        elif self.record_type == "NS":
            target = value
            if "." in target and not target.endswith("."):
                # Some Route 53 NS records are missing the terminating .
                target += "."
            out.write(target)
        else:
            out.write(value)
        out.write("\n")
    return out.getvalue()
class IncludedResponse(object):
    """Accumulates the body of an internally-included WSGI response.

    While open, writes go into an in-memory buffer; close() freezes the
    body into `self.str` and releases the buffer.
    """

    def __init__(self):
        self.headers = None
        self.status = None
        self.output = StringIO()
        self.str = None

    def close(self):
        # Freeze the accumulated body and drop the live buffer.
        self.str = self.output.getvalue()
        self.output.close()
        self.output = None

    def write(self, s):
        assert self.output is not None, (
            "This response has already been closed and no further data "
            "can be written.")
        self.output.write(s)

    def __str__(self):
        return self.body

    def body__get(self):
        # Frozen copy after close(); live buffer contents before.
        if self.str is not None:
            return self.str
        return self.output.getvalue()

    body = property(body__get)
def generate_index(self):
    """Render an HTML directory-listing page for this directory.

    IMPORTANT: the templates (self.header, self.parent_backlink,
    self.subdir_link, self.file_link, self.footer) are expanded with
    ``% locals()``, so every local variable name below is part of the
    template contract and must not be renamed.
    """
    display_name = escape_html(self.dir_name if self.dir_name else "/")
    html = StringIO()
    html.write(self.header % locals())
    # If this is a subdirectory, create a link back to the parent.
    if self.dir_name:
        parent_dirname = ("/" + self.dir_name).rsplit("/", 1)[0]
        html.write(self.parent_backlink % locals())
    # One link per sub-directory, sorted by name.
    for subdir_name, subdir in sorted(iteritems(self.subdirs)):
        subdir_link = escape_html(url_quote(
            self.dir_name + "/" + subdir_name if self.dir_name
            else subdir_name))
        subdir_name = escape_html(subdir_name)
        html.write(self.subdir_link % locals())
    # One row per file, with an icon chosen by extension.
    for filename, key in sorted(iteritems(self.contents)):
        ext = splitext(filename)[-1]
        icon_name = self.icons.get(ext, "binary.png")
        suffix_type = self.suffix_types.get(ext, " ")
        file_link = escape_html(url_quote(key.name))
        filename = escape_html(filename)
        last_modified = escape_html(key.last_modified)
        size = str(key.size)
        description = ""
        html.write(self.file_link % locals())
    html.write(self.footer % locals())
    return html.getvalue()
def scan(path):
    """Perform an in-process binary module scan.

    The module is loaded (imported) into the current Python interpreter.

    "path" - a path to a binary module to scan

    Returns a CIX 2.0 XML string.
    """
    from gencix.python import gencixcore as gencix

    name, _ = os.path.splitext(os.path.basename(path))
    # Renamed from `dir`, which shadowed the `dir()` builtin.
    mod_dir = os.path.dirname(path)
    root = gencix.Element('codeintel', version='2.0', name=name)
    gencix.docmodule(name, root, usefile=True, dir=mod_dir)
    gencix.perform_smart_analysis(root)
    gencix.prettify(root)
    tree = gencix.ElementTree(root)
    stream = StringIO()
    try:
        stream.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        tree.write(stream)
        return stream.getvalue()
    finally:
        stream.close()
def _openURL(self, url):
    """Download content from url, read filename and content_type

    return file_object, filename, content_type tuple
    """
    # Quote path part of url
    url = reencodeUrlEscapes(url)
    # build a new file from the url
    url_file = urllib.request.urlopen(
        urllib.request.Request(url, headers={'Accept': '*/*'}))
    data = url_file.read()  # time out must be set or ... too long XXX
    # NOTE(review): urllib.request returns bytes; this assumes the
    # module-level StringIO alias accepts them — confirm it is BytesIO-like.
    file_object = StringIO()
    file_object.write(data)
    file_object.seek(0)
    # if a content-disposition header is present,
    # try first to read the suggested filename from it.
    header_info = url_file.info()
    # Bug fix: urllib.request's info() is an email.message.Message, which
    # has no getheader()/gettype() (those are the old py2 mimetools API).
    content_disposition = header_info.get('Content-Disposition', '')
    filename = parse_header(content_disposition)[1].get('filename')
    if not filename:
        # Now read the filename from url.
        # In case of http redirection, the real url must be read
        # from file object returned by urllib2.urlopen.
        # It can happens when the header 'Location' is present in request.
        # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.30
        url = url_file.geturl()
        # Create a file name based on the URL and quote it
        filename = urllib.parse.urlsplit(url)[-3]
        filename = os.path.basename(filename)
        filename = urllib.parse.quote(filename, safe='')
        # pylint:disable=redundant-keyword-arg
        filename = filename.replace('%', '')
    content_type = header_info.get_content_type()
    return file_object, filename, content_type
def __str__(self):
    """One order per line, in iteration order."""
    from six.moves import cStringIO as StringIO
    buf = StringIO()
    for order in self:
        buf.write("%s\n" % str(order))
    return buf.getvalue()
def generate_roles_data_from_directory(directory, roles, validate=True):
    """Generate a roles data file using roles from a local path

    :param directory local filesystem path to the roles
    :param roles ordered list of roles
    :param validate validate the metadata format in the role yaml files
    :returns string contents of the roles_data.yaml
    """
    check_role_exists(get_roles_list_from_directory(directory), roles)
    buf = StringIO()
    buf.write("\n".join(["#" * 79, "# File generated by TripleO",
                         "#" * 79, ""]))
    for role in roles:
        parts = role.split(':')
        defined_role = parts[0]
        role_file = os.path.join(directory, "{}.yaml".format(defined_role))
        if validate:
            validate_role_yaml(role_path=role_file)
        with open(role_file, "r") as src:
            if len(parts) > 1:
                # "defined:generated" syntax: emit the role under a new name.
                buf.write(generate_role_with_colon_format(src.read(),
                                                          defined_role,
                                                          parts[1]))
            else:
                shutil.copyfileobj(src, buf)
    return buf.getvalue()
def bind_config(self):
    """
    Return the BIND configuration for this record.
    """
    out = StringIO()
    for value in self.values:
        ttl = 60 if self.ttl is None else self.ttl
        out.write("%-63s %7d IN %-7s " % (self.name, ttl, self.record_type))
        if self.record_type == "SOA":
            fields = value.split()
            # ns, email, then the numeric SOA fields in parentheses.
            out.write("%s %s (%s)" % (fields[0], fields[1],
                                      " ".join(fields[2:])))
        elif self.record_type == "NS":
            target = value
            if "." in target and not target.endswith("."):
                # Some Route 53 NS records are missing the terminating .
                target += "."
            out.write(target)
        else:
            out.write(value)
        out.write("\n")
    return out.getvalue()
def generate_ssl_cert(target_file=None, overwrite=False, random=False,
                      return_content=False, serial_number=None):
    """Generate a self-signed SSL certificate + RSA key pair.

    :param target_file: base path; '<target>.key' and '<target>.crt' are
        written next to it when given
    :param overwrite: when False and target_file exists, reuse it and
        return immediately without generating anything
    :param random: when True, inject a short uid into target_file to get
        a unique path per call
    :param return_content: when True, return the combined key+cert PEM
        text instead of the file-name tuple
    :param serial_number: certificate serial (defaults to 1001)
    :returns: (target_file, cert_file_name, key_file_name) tuple, or the
        PEM content string when return_content is True
    """
    # Note: Do NOT import "OpenSSL" at the root scope
    # (Our test Lambdas are importing this file but don't have the module installed)
    from OpenSSL import crypto

    if target_file and not overwrite and os.path.exists(target_file):
        key_file_name = '%s.key' % target_file
        cert_file_name = '%s.crt' % target_file
        return target_file, cert_file_name, key_file_name
    if random and target_file:
        if '.' in target_file:
            target_file = target_file.replace('.', '.%s.' % short_uid(), 1)
        else:
            target_file = '%s.%s' % (target_file, short_uid())
    # create a key pair
    # NOTE(review): 1024-bit RSA and sha1 signing (below) are weak by
    # modern standards — acceptable only for local testing.
    k = crypto.PKey()
    k.generate_key(crypto.TYPE_RSA, 1024)
    # create a self-signed cert
    cert = crypto.X509()
    subj = cert.get_subject()
    subj.C = 'AU'
    subj.ST = 'Some-State'
    subj.L = 'Some-Locality'
    subj.O = 'LocalStack Org'  # noqa
    subj.OU = 'Testing'
    subj.CN = 'LocalStack'
    serial_number = serial_number or 1001
    cert.set_serial_number(serial_number)
    cert.gmtime_adj_notBefore(0)
    # valid for ten years
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())
    cert.set_pubkey(k)
    cert.sign(k, 'sha1')
    cert_file = StringIO()
    key_file = StringIO()
    cert_file.write(to_str(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)))
    key_file.write(to_str(crypto.dump_privatekey(crypto.FILETYPE_PEM, k)))
    cert_file_content = cert_file.getvalue().strip()
    key_file_content = key_file.getvalue().strip()
    # combined PEM: private key first, then certificate
    file_content = '%s\n%s' % (key_file_content, cert_file_content)
    if target_file:
        save_file(target_file, file_content)
        key_file_name = '%s.key' % target_file
        cert_file_name = '%s.crt' % target_file
        save_file(key_file_name, key_file_content)
        save_file(cert_file_name, cert_file_content)
        # register for cleanup at shutdown
        TMP_FILES.append(target_file)
        TMP_FILES.append(key_file_name)
        TMP_FILES.append(cert_file_name)
        if not return_content:
            return target_file, cert_file_name, key_file_name
    return file_content
def ask_book_str(self):
    """Render the ask side of the book, one price level per line."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------- Asks --------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.asks is not None and len(self.asks) > 0:
        for k, v in self.asks.price_tree.items():
            file_str.write("%s" % v)
    return file_str.getvalue()
def bid_book_str(self):
    """Render the bid side of the book, best (highest) price first."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------- Bids --------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.bids is not None and len(self.bids) > 0:
        for k, v in self.bids.price_tree.items(reverse=True):
            file_str.write("%s" % v)
    return file_str.getvalue()
def vmd_gaussian_rep(chainids, coloring, rep_counter, rep_number, i):
    """Build the VMD rep-string for gaussian domains.

    Emits two representations: a Trace rep over the chain selection and a
    VDW rep over the full selection.

    Parameters:
    -----------
    chainids : string_array
        List of chain-id strings (from the yaml dictionary); passed on.
    coloring : string
        Coloring scheme chooser: "random" or "domain".
    rep_counter, rep_number, i : int
        Counters/indices maintained by the outer loop.

    Returns:
    --------
    rep : string
    updated counter : int
    """
    if coloring == "domain":
        colorid = 0
    elif coloring == "random":
        colorid = i % 32
    else:
        raise ValueError(
            "Coloring method '{}' not supported for gaussian domain".format(
                coloring))

    out = StringIO()
    out.write(ADDREP_FORMAT.format(
        colorid=colorid,
        rep_number=rep_number,
        mol_number=0,
        resid_string=vmd_selection_string_trace(chainids),
        rep_type="Trace",
        rep_param="0.300000 12.000000",
    ))
    rep_counter += 1
    # Second rep uses the freshly incremented counter as its number.
    out.write(ADDREP_FORMAT.format(
        colorid=colorid,
        rep_number=rep_counter,
        mol_number=0,
        resid_string=vmd_selection_string(chainids),
        rep_type="VDW",
        rep_param="1.000000 12.000000",
    ))
    return out.getvalue(), rep_counter + 1
def write_default_ansible_cfg(work_dir, remote_user,
                              ssh_private_key=None,
                              transport=None,
                              base_ansible_cfg='/etc/ansible/ansible.cfg',
                              override_ansible_cfg=None):
    """Write an ansible.cfg into work_dir based on base_ansible_cfg.

    :param work_dir: directory that receives ansible.cfg and ansible.log
    :param remote_user: value for defaults.remote_user (skipped if falsy)
    :param ssh_private_key: value for defaults.private_key_file
    :param transport: value for defaults.transport
    :param base_ansible_cfg: config file used as the starting point
    :param override_ansible_cfg: ini-format string merged in last, so it
        wins over everything set here
    :returns: path of the written ansible.cfg
    """
    ansible_config_path = os.path.join(work_dir, 'ansible.cfg')
    shutil.copy(base_ansible_cfg, ansible_config_path)

    config = configparser.ConfigParser()
    config.read(ansible_config_path)
    config.set('defaults', 'retry_files_enabled', 'False')
    log_path = os.path.join(work_dir, 'ansible.log')
    config.set('defaults', 'log_path', log_path)
    # Rotate any pre-existing log aside with a timestamp suffix.
    if os.path.exists(log_path):
        new_path = (log_path + '-' +
                    datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
        os.rename(log_path, new_path)
    config.set('defaults', 'forks', '25')
    config.set('defaults', 'timeout', '30')
    config.set('defaults', 'gather_timeout', '30')

    # mistral user has no home dir set, so no place to save a known hosts file
    config.set('ssh_connection', 'ssh_args',
               '-o UserKnownHostsFile=/dev/null '
               '-o StrictHostKeyChecking=no '
               '-o ControlMaster=auto '
               '-o ControlPersist=30m '
               '-o ServerAliveInterval=5 '
               '-o ServerAliveCountMax=5')
    config.set('ssh_connection', 'control_path_dir',
               os.path.join(work_dir, 'ansible-ssh'))
    config.set('ssh_connection', 'retries', '8')
    config.set('ssh_connection', 'pipelining', 'True')

    # Set connection info in config file so that subsequent/nested ansible
    # calls can re-use it
    if remote_user:
        config.set('defaults', 'remote_user', remote_user)
    if ssh_private_key:
        config.set('defaults', 'private_key_file', ssh_private_key)
    if transport:
        config.set('defaults', 'transport', transport)

    if override_ansible_cfg:
        sio_cfg = StringIO()
        sio_cfg.write(override_ansible_cfg)
        sio_cfg.seek(0)
        # Bug fix: readfp() is deprecated and removed in Python 3.12;
        # read_file() is the supported replacement.
        config.read_file(sio_cfg)
        sio_cfg.close()

    with open(ansible_config_path, 'w') as configfile:
        config.write(configfile)

    return ansible_config_path
def test_error(self):
    # Feed garbage bytes to the reader and expect a FlowReadError.
    bogus = StringIO()
    bogus.write("bogus")
    bogus.seek(0)
    reader = flow.FlowReader(bogus)
    tutils.raises(flow.FlowReadError, list, reader.stream())

    # strerror carries the message passed to the constructor.
    err = flow.FlowReadError("foo")
    assert err.strerror == "foo"
def ask_book_aggregated_str(self):
    """Render aggregated ask levels as 'volume @ price' lines."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------- Asks --------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.asks is not None and len(self.asks) > 0:
        for k, v in self.asks.price_tree.items():
            # aggregate: total volume at the level's head-order price
            file_str.write("%s\t@\t%.4f\n" %
                           (v.volume, v.head_order.price / float(10000)))
    return file_str.getvalue()
def test_error(self):
    # Garbage input must make the reader raise FlowReadException.
    bogus = StringIO()
    bogus.write("bogus")
    bogus.seek(0)
    reader = flow.FlowReader(bogus)
    tutils.raises(FlowReadException, list, reader.stream())

    # The exception stringifies to the message it was built with.
    err = FlowReadException("foo")
    assert str(err) == "foo"
def write(self, peerid, storage_index, shnum, offset, data):
    """Splice `data` into peer `peerid`'s share `shnum` at `offset`,
    extending the share if needed."""
    shares = self._peers.setdefault(peerid, {})
    buf = StringIO()
    buf.write(shares.get(shnum, ""))
    buf.seek(offset)
    buf.write(data)
    shares[shnum] = buf.getvalue()
def bid_book_aggregated_str(self):
    """Render aggregated bid levels, best (highest) price first."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------- Bids --------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.bids is not None and len(self.bids) > 0:
        for k, v in self.bids.price_tree.items(reverse=True):
            # aggregate: volume at each level, price scaled by the
            # module-level PRICE_PRECISION
            file_str.write("%s\t@\t%.4f\n" %
                           (v.volume,
                            v.head_order.price / float(10 ** PRICE_PRECISION)))
    return file_str.getvalue()
def ask_book_aggregated_str(self):
    """Render aggregated ask levels as 'volume @ price' lines."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------- Asks --------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.asks is not None and len(self.asks) > 0:
        for k, v in self.asks.price_tree.items():
            # aggregate: total volume at the level's head-order price
            file_str.write("%s\t@\t%.4f\n" %
                           (v.volume, v.head_order.price / float(10000)))
    return file_str.getvalue()
def get_cix_string(cix, prettyFormat=True):
    """Serialize a CIX element tree to an XML string.

    :param cix: the CIX root element
    :param prettyFormat: when True, prettify the tree in place first
    :returns: XML document string with an explicit UTF-8 declaration
    """
    # Get the CIX.
    if prettyFormat:
        prettify(cix)
    cixstream = StringIO()
    try:
        cixtree = ElementTree(cix)
        cixstream.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        cixtree.write(cixstream)
        return cixstream.getvalue()
    finally:
        # Bug fix: the stream was leaked if prettify/write raised;
        # finally guarantees it is closed on every path.
        cixstream.close()
def _crypt_py2(cls, op, key, iv, data):
    """AES-128-CBC encrypt/decrypt `data` via M2Crypto (Python 2 path).

    :param op: M2Crypto cipher direction flag (encrypt vs decrypt)
    :param key: 128-bit AES key
    :param iv: CBC initialization vector
    :param data: input bytes; padding is disabled, so callers must supply
        block-aligned data
    :returns: the transformed bytes
    """
    cipher = EVP.Cipher(alg='aes_128_cbc', key=key, iv=iv, op=op,
                        padding=False)
    inf = StringIO(data)
    outf = StringIO()
    # inf.read() returns everything on the first pass and '' on the
    # second, so this loop runs the cipher over the whole buffer once.
    while True:
        buf = inf.read()
        if not buf:
            break
        outf.write(cipher.update(buf))
    # flush any remaining block state
    outf.write(cipher.final())
    return outf.getvalue()
def test_copy_from_cols(self):
    curs = self.conn.cursor()
    # Build a one-column data file: 0..9, one id per line.
    buf = StringIO()
    for i in xrange(10):
        buf.write("%s\n" % (i, ))
    buf.seek(0)
    curs.copy_from(MinimalRead(buf), "tcopy", columns=['id'])

    # Only `id` was copied, so `data` must be NULL for every row.
    curs.execute("select * from tcopy order by id")
    self.assertEqual([(i, None) for i in range(10)], curs.fetchall())
def test_copy_from_cols(self):
    curs = self.conn.cursor()
    # One id per line; no data column in the input.
    buf = StringIO()
    for i in xrange(10):
        buf.write("%s\n" % (i,))
    buf.seek(0)
    curs.copy_from(MinimalRead(buf), "tcopy", columns=['id'])

    # The uncopied column defaults to NULL.
    curs.execute("select * from tcopy order by id")
    self.assertEqual([(i, None) for i in range(10)], curs.fetchall())
def _format_stack(frame):
    """
    Pretty-print the stack of `frame` like logging would.
    """
    buf = StringIO()
    buf.write("Stack (most recent call last):\n")
    traceback.print_stack(frame, file=buf)
    info = buf.getvalue()
    buf.close()
    # Drop the single trailing newline print_stack leaves behind.
    if info.endswith("\n"):
        info = info[:-1]
    return info
def test_copy_from_cols_err(self):
    curs = self.conn.cursor()
    buf = StringIO()
    for i in xrange(10):
        buf.write("%s\n" % (i,))
    buf.seek(0)

    def bad_cols():
        # Raises as soon as copy_from iterates the columns argument;
        # the unreachable yield makes this a generator function.
        raise ZeroDivisionError()
        yield 'id'

    self.assertRaises(ZeroDivisionError,
                      curs.copy_from, MinimalRead(buf), "tcopy",
                      columns=bad_cols())
def initialize_db(self, indices):
    """(Re)create the merging-schema tables and seed the miller table.

    Drops and recreates each table from merging_schema_tables(), then
    bulk-inserts one (h, k, l) row per entry of `indices` into the
    `<runtag>_miller` table.

    SECURITY NOTE: table names and values are spliced into SQL with
    string interpolation rather than parameterized queries — safe only
    because runtag/indices come from trusted config, as the original
    comment warns.
    """
    db = self.connection()
    print("testing for tables")
    cursor = db.cursor()
    cursor.execute("SHOW TABLES from %s;" % self.params.mysql.database)
    all_tables = cursor.fetchall()

    # Beware of SQL injection vulnerability (here and elsewhere).
    new_tables = self.merging_schema_tables(self.params.mysql.runtag)
    for table in new_tables:
        # table is a (name, column-spec) pair; recreate from scratch.
        cursor.execute("DROP TABLE IF EXISTS %s;" % table[0])
        cursor.execute("CREATE TABLE %s " % table[0] +
                       table[1].replace("\n", " ") + " ;")

    from six.moves import cStringIO as StringIO
    # Build one big multi-row INSERT statement in memory.
    query = StringIO()
    query.write("INSERT INTO `%s_miller` (h,k,l) VALUES " %
                self.params.mysql.runtag)
    firstcomma = ""
    for item in indices:
        query.write(firstcomma)
        firstcomma = ","
        query.write("('%d','%d','%d')" % (item[0], item[1], item[2]))
    query.write(" ;")
    cursor.execute(query.getvalue())
    db.commit()
def read(self, offset, length):
    """
    s3handler.read(offset, length) -> str

    Read data from this volume from offset to offset + length.
    """
    start_block, start_offset = divmod(offset, self.block_size)
    end_block, end_offset = divmod(offset + length, self.block_size)
    if end_offset == 0:
        # Range ends exactly on a block boundary: fold into the
        # previous block.
        end_block -= 1
        end_offset = self.block_size

    pieces = []
    for block_id in range(start_block, end_block + 1):
        block_data = self.read_block(block_id)
        if block_id == start_block and block_id == end_block:
            # Whole range inside one block.
            pieces.append(block_data[start_offset:end_offset])
        elif block_id == start_block:
            # Trim everything before the start of the range.
            pieces.append(block_data[start_offset:])
        elif block_id == end_block:
            # Trim everything after the end of the range.
            pieces.append(block_data[:end_offset])
        else:
            pieces.append(block_data)
    return "".join(pieces)
def apply(self, obj, token, sm, url=None, result=None, top=1):
    """Recursively apply a WebDAV DELETE check/operation to `obj`.

    Walks collections depth-first, accumulating per-resource failures
    (403/423) into a DAV multistatus XML body in `result`.  At the top
    level, a failure on a non-collection raises directly; otherwise a
    multistatus string (or '') is returned.

    :param token: lock token offered by the client, may match obj's lock
    :param sm: security manager used for permission checks
    :param top: 1 for the outermost call, 0 for recursive calls
    """
    if result is None:
        result = StringIO()
    url = urlfix(url, 'DELETE')
    url = urlbase(url)
    iscol = isDavCollection(obj)
    errmsg = None
    parent = aq_parent(obj)
    islockable = IWriteLock.providedBy(obj)

    if parent and (not sm.checkPermission(delete_objects, parent)):
        # User doesn't have permission to delete this object
        errmsg = "403 Forbidden"
    elif islockable and obj.wl_isLocked():
        if token and obj.wl_hasLock(token):
            # Object is locked, and the token matches (no error)
            errmsg = ""
        else:
            errmsg = "423 Locked"

    if errmsg:
        if top and (not iscol):
            # Topmost non-collection failure: raise rather than report.
            if errmsg == "403 Forbidden":
                raise Forbidden()
            if errmsg == "423 Locked":
                raise Locked()
        elif not result.getvalue():
            # We haven't had any errors yet, so our result is empty
            # and we need to set up the XML header
            result.write('<?xml version="1.0" encoding="utf-8" ?>\n'
                         '<d:multistatus xmlns:d="DAV:">\n')
        result.write('<d:response>\n <d:href>%s</d:href>\n' % url)
        result.write(' <d:status>HTTP/1.1 %s</d:status>\n' % errmsg)
        result.write('</d:response>\n')

    if iscol:
        for ob in obj.objectValues():
            # Remember whether the child was a ZODB ghost so we can
            # re-deactivate it after visiting (keeps cache pressure low).
            dflag = hasattr(ob, '_p_changed') and (ob._p_changed is None)
            if hasattr(ob, '__dav_resource__'):
                uri = urljoin(url, absattr(ob.getId()))
                self.apply(ob, token, sm, uri, result, top=0)
                if dflag:
                    ob._p_deactivate()

    if not top:
        return result
    if result.getvalue():
        # One or more subitems can't be deleted, so close the
        # multistatus element
        result.write('</d:multistatus>\n')
    return result.getvalue()
def capture_output(environ, start_response, application):
    """
    Runs application with environ and start_response, and captures
    status, headers, and body.  Sends status and header, but *not* body.
    Returns (status, headers, body).

    Deprecated: prefer wsgilib.intercept_output.
    """
    warnings.warn(
        'wsgilib.capture_output has been deprecated in favor '
        'of wsgilib.intercept_output',
        DeprecationWarning, 2)
    captured = []
    body = StringIO()

    def _start_response(status, headers, exc_info=None):
        # A re-invocation replaces any previously captured values.
        captured[:] = [status, headers]
        start_response(status, headers, exc_info)
        return body.write

    app_iter = application(environ, _start_response)
    try:
        for chunk in app_iter:
            body.write(chunk)
    finally:
        close = getattr(app_iter, 'close', None)
        if close is not None:
            close()
    # Pad with None if start_response was never called.
    while len(captured) < 2:
        captured.append(None)
    captured.append(body.getvalue())
    return captured
def capture_output(environ, start_response, application):
    """
    Runs application with environ and start_response, and captures
    status, headers, and body.  Sends status and header, but *not* body.
    Returns (status, headers, body).

    Deprecated: prefer wsgilib.intercept_output.
    """
    warnings.warn(
        "wsgilib.capture_output has been deprecated in favor "
        "of wsgilib.intercept_output",
        DeprecationWarning, 2
    )
    captured = []
    body = StringIO()

    def _start_response(status, headers, exc_info=None):
        # A re-invocation replaces any previously captured values.
        captured[:] = [status, headers]
        start_response(status, headers, exc_info)
        return body.write

    app_iter = application(environ, _start_response)
    try:
        for chunk in app_iter:
            body.write(chunk)
    finally:
        close = getattr(app_iter, "close", None)
        if close is not None:
            close()
    # Pad with None if start_response was never called.
    while len(captured) < 2:
        captured.append(None)
    captured.append(body.getvalue())
    return captured
def vmd_rep_gen(topology, coloring):
    """Walk the topology and emit VMD representation commands.

    Domains are colored per the chosen scheme and rendered with a rep
    type that depends on the domain type.

    Parameter:
    ----------
    topology : dictionary
        Dictionary from the cplx read by yaml
    coloring : string
        "random" or "domain"; chooses the color scheme.

    Returns:
    --------
    string
        All VMD instructions needed to create the representations.
    """
    out = StringIO()
    bead_counter = 1
    rep_counter = 0
    name2type = get_type_translater(topology["definitions"])
    for top in topology["topologies"]:
        for i, dom in six.iteritems(top["domains"]):
            rep_number = rep_counter
            first_bead = bead_counter
            last_bead = first_bead + dom["nbeads"]
            bead_counter = last_bead + 1
            dom_rep, rep_counter = vmd_domain_rep(
                name2type[dom["type"]],
                dom["chain-ids"],
                coloring,
                rep_counter,
                rep_number,
                i,
                dom,
            )
            out.write(dom_rep)
    # The Following has to be the LAST included instruction in the file !!
    # Because otherwise vmd will automatically generate an additional
    # representation including all atoms, displaying them as lines.
    out.write(DEL_REP_FORMAT.format(rep_counter=rep_counter))
    rendered = out.getvalue()
    out.close()
    return rendered
def _copy_from(self, curs, nrecs, srec, copykw):
    """Shared COPY FROM driver: load nrecs rows of srec-sized payloads
    and verify the round trip."""
    buf = StringIO()
    for i, letter in izip(xrange(nrecs), cycle(string.ascii_letters)):
        # Row format: "<id>\t<letter * srec>"
        buf.write("%s\t%s\n" % (i, letter * srec))
    buf.seek(0)
    curs.copy_from(MinimalRead(buf), "tcopy", **copykw)

    curs.execute("select count(*) from tcopy")
    self.assertEqual(nrecs, curs.fetchone()[0])

    curs.execute("select data from tcopy where id < %s order by id",
                 (len(string.ascii_letters),))
    for i, (payload,) in enumerate(curs):
        self.assertEqual(payload, string.ascii_letters[i] * srec)
def write(self, content, html=None, table=None, code=None):
    '''Set clipboard content.

content: the object which will be put onto the clipboard.
html: BOOL. Whether the content is rich text coded in HTML. Default: False
table: BOOL. Whether the content is a table. Default: False
code: string. The coding of the content text.'''
    if table:
        # A table is serialized to an HTML table first.
        from wavesynlib.languagecenter.html.utils import iterable_to_table
        html = True
        content = iterable_to_table(content)
    if html or code:
        # Rich text / encoded text goes through the stream-based path.
        stream = StringIO()
        stream.write(content)
        stream.seek(0)
        clipb.stream_to_clipboard(stream,
                                  mode=None,
                                  code=code,
                                  tee=None,
                                  null=None,
                                  html=html)
    else:
        super().write(content)
def parse(text, encoding='utf8'):
    """Parse the querystring into a normalized form."""
    # Initialize the query object.
    query = Query()

    # Decode the text if we got bytes.
    if isinstance(text, six.binary_type):
        text = text.decode(encoding)

    # Single pass over the characters, accumulating the current segment
    # in a buffer until a logical operator terminates it.
    buf = StringIO()
    for ch in text:
        if ch in (constants.LOGICAL_AND, constants.LOGICAL_OR):
            if not buf.tell():
                # Empty buffer: the operator appeared out of place.
                raise ValueError('Found `{}` out of place'.format(ch))

            # Parse the segment up till the combinator, then reset.
            query.segments.append(parse_segment(buf.getvalue(), ch))
            buf.truncate(0)
            buf.seek(0)
        else:
            # This isn't a special character, just roll with it.
            buf.write(ch)

    # TODO: Throw some nonsense here if the query string ended with a
    # & or ;, because that makes no sense.

    if buf.tell():
        # Append the remainder of the query string.
        query.segments.append(parse_segment(buf.getvalue()))

    # Return the constructed query object.
    return query
def marshall(self, obj, marshaller):
    """Marshall `obj` and return its serialized body as a file-like object.

    Runs the given marshaller (optionally post-processed by the object's
    own `marshall_hook`), then normalizes the resulting data — either a
    plain string or a Zope `Pdata` chunk chain — into a seekable stream.
    """
    REQUEST = obj.REQUEST
    RESPONSE = REQUEST.RESPONSE
    ddata = marshaller.marshall(obj, REQUEST=REQUEST, RESPONSE=RESPONSE)
    # aq_base strips acquisition so we only honor a hook defined on the
    # object itself, not one acquired from a container.
    if hasattr(aq_base(obj), 'marshall_hook') \
       and obj.marshall_hook:
        ddata = obj.marshall_hook(ddata)

    content_type, length, data = ddata

    if isinstance(data, six.string_types):
        return StringIO(data)

    # Otherwise `data` is a Pdata linked list: walk the .next chain and
    # concatenate each chunk's .data into one buffer.
    s = StringIO()
    while data is not None:
        s.write(data.data)
        data = data.next
    s.seek(0)
    return s
def trade_book_str(self):
    """Render up to the first 10 trades as 'qty @ price (timestamp)'."""
    # Efficient string concat
    file_str = StringIO()
    file_str.write("------ Trades ------\n")
    # `is not None` instead of `!= None` (PEP 8 identity comparison).
    if self.trades is not None and len(self.trades) > 0:
        num = 0
        for entry in self.trades:
            if num < 10:
                file_str.write(
                    str(entry.qty) + " @ " +
                    "%f" % (entry.price / float(10000)) +
                    " (" + str(entry.timestamp) + ")\n"
                )
                num += 1
            else:
                break
    return file_str.getvalue()
def make_string_buffer(string):
    """Returns a readable/writeable file-like object, containing string.

    >>> f = make_string_buffer(u'text')
    >>> print(f.read())
    text

    If the string is a bytestring, then the returned object will operate in
    binary mode.

    >>> f = make_string_buffer(b'bytes')
    >>> f.read() == b'bytes'
    True
    """
    # Text gets a StringIO, bytes a BytesIO; both rewound for the reader.
    buf = StringIO() if isinstance(string, six.text_type) else io.BytesIO()
    buf.write(string)
    buf.seek(0)
    return buf
def test_copy_from_with_fks(self):
    curs = self.conn.cursor()
    curs.execute('''
        CREATE TEMPORARY TABLE tcopy_ref (
            id serial,
            FOREIGN KEY(id) REFERENCES tcopy )
        ''')

    # Seed the referenced table with id 1.
    buf = StringIO()
    buf.write("%s\t%s\n" % (1, 'b'))
    buf.seek(0)
    curs.copy_from(MinimalRead(buf), "tcopy")

    # Copying id 2 into the referencing table must violate the FK.
    buf2 = StringIO()
    buf2.write("%s\n" % (2))
    buf2.seek(0)
    self.assertRaises(exceptions.OperationalError,
                      curs.copy_from, MinimalRead(buf2), "tcopy_ref")
def __str__(self):
    """Render the query as a parenthesized, combinator-joined list of
    segments."""
    parts = ['(']
    last = len(self) - 1
    for index, segment in enumerate(self):
        parts.append(str(segment))
        if index < last:
            # '&' for logical AND, '|' for logical OR.
            comb = '&' if segment.combinator == operator.and_ else '|'
            parts.append(' {} '.format(comb))
    parts.append(')')
    return ''.join(parts)
def __str__(self):
    """
    Format this query segment in a human-readable representation
    intended for debugging.
    """
    pieces = []
    if self.negated:
        pieces.append('not ')
    pieces.append('.'.join(self.path))
    if self.values:
        pieces.append(' :%s ' % self.operator)
        pieces.append(' | '.join(repr(v) for v in self.values))
    return ''.join(pieces)