# Minified helper (parameter D is the editor instance); os, WIN, _D and _A are
# module-level names from the enclosing minified module (WIN looks like a Windows
# flag, _D like the newline string and _A like None, but that is an assumption).
def edit(D, text):
    L = '\r\n'
    K = 'utf-8-sig'
    A = text
    import tempfile as H
    A = A or ''
    E = type(A) in [bytes, bytearray]
    if not E and A and not A.endswith(_D):
        A += _D
    I, B = H.mkstemp(prefix='editor-', suffix=D.extension)
    try:
        if not E:
            if WIN:
                F = K
                A = A.replace(_D, L)
            else:
                F = 'utf-8'
            A = A.encode(F)
        C = os.fdopen(I, 'wb')
        C.write(A)
        C.close()
        J = os.path.getmtime(B)
        D.edit_file(B)
        if D.require_save and os.path.getmtime(B) == J:
            return _A
        C = open(B, 'rb')
        try:
            G = C.read()
        finally:
            C.close()
        if E:
            return G
        else:
            return G.decode(K).replace(L, _D)
    finally:
        os.unlink(B)
def testTrollitaireConverter(self):
    """Test that the Trollitaire converter works properly."""
    # First test case (no surprises)
    tempfile = cStringIO.StringIO()
    with open(old_troll_file) as infile:
        trollconvert.convert_draft_file(infile, tempfile)
    expected = open(converted_troll).read()
    tempfile.seek(0)
    result = tempfile.read()
    self.assertEqual(expected, result)

    # Second test case (has an UNDO)
    tempfile = cStringIO.StringIO()
    with open(old_troll_file2) as infile:
        trollconvert.convert_draft_file(infile, tempfile)
    expected = open(converted_troll2).read()
    tempfile.seek(0)
    result = tempfile.read()
    self.assertEqual(expected, result)
def _load_db_to_memory(in_db_path):
    # Read database to tempfile
    conn = sqlite3.connect('file:%s?mode=ro' % in_db_path, uri=True)
    tempfile = io.StringIO()
    for line in conn.iterdump():
        tempfile.write('%s\n' % line)
    conn.close()
    tempfile.seek(0)

    # Create a database in memory and import from tempfile
    conn = sqlite3.connect(":memory:")
    conn.cursor().executescript(tempfile.read())
    return conn
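A minimal usage sketch for _load_db_to_memory above, assuming sqlite3 and io are imported as the function requires and that 'example.db' is a hypothetical existing SQLite file:

import sqlite3
import io

# Load a read-only on-disk database into memory, then query the in-memory copy.
mem_conn = _load_db_to_memory('example.db')  # 'example.db' is a placeholder path
cursor = mem_conn.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
print(cursor.fetchall())  # tables that were copied into the in-memory database
mem_conn.close()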
def _copy_temp_to_perm(self):
    # This is a really stupid way to do this.
    # FIXME: abstract out the duplicated code, lazybones.
    if self.gsettings.expert is False:
        permfile = open(self.filesinkpath, 'wb')
        self.tempfile.close()
        self.tempfile = open(self.temppath, 'rb')
        permfile.write(self.tempfile.read())
        permfile.close()
    else:
        for i in (1, 2):
            permfile = open(self.filesinkpath + '-' + str(i) + '.wav', 'wb')
            tf = eval('self.tempfile' + str(i))
            tf.close()
            tempfile = open(eval('self.temppath' + str(i)), 'rb')
            permfile.write(tempfile.read())
            permfile.close()
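The FIXME in the snippet above could be addressed by looking the attributes up with getattr() instead of eval(); a rough sketch, assuming the instance really has tempfile1/tempfile2 and temppath1/temppath2 attributes:

# Sketch only: same behaviour as the expert branch above, without eval().
for i in (1, 2):
    getattr(self, 'tempfile%d' % i).close()
    with open(getattr(self, 'temppath%d' % i), 'rb') as src, \
            open('%s-%d.wav' % (self.filesinkpath, i), 'wb') as dst:
        dst.write(src.read())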
def run(self, args, trust_dir, stdinput=None, username_passwd=None):
    """
    Runs the notary client in a subprocess, and returns the output
    """
    command = self.client + ["-d", trust_dir] + list(args)
    print("$ " + " ".join(command))

    # username password require newlines - EOF doesn't seem to do it.
    communicate_input = (tuple((x + "\n" for x in self.username_passwd))
                         if username_passwd is None else username_passwd)

    # Input comes before the username/password, and if there is a username
    # and password, we need a newline after the input. Otherwise, just use
    # EOF (for instance if we're piping text to verify)
    if stdinput is not None:
        if communicate_input:
            communicate_input = (stdinput + "\n",) + communicate_input
        else:
            communicate_input = (stdinput,)

    _, filename = mkstemp()
    with open(filename, 'wb') as tempfile:
        process = Popen(command, env=self.env, stdout=tempfile, stdin=PIPE,
                        universal_newlines=True)

        # communicate writes once then closes stdin for the process
        process.communicate("".join(communicate_input))
        process.wait()

    with open(filename) as tempfile:
        output = tempfile.read()

    retcode = process.poll()
    cleanup(filename)
    print(output)
    if retcode:
        raise CalledProcessError(retcode, command, output=output)
    return output
def main(path_to_debug_info=None, gdb_argv=None, no_import=False):
    """
    Start the Cython debugger. This tells gdb to import the Cython and
    Python extensions (libcython.py and libpython.py) and it enables gdb's
    pending breakpoints.

    path_to_debug_info is the path to the Cython build directory
    gdb_argv is the list of options to gdb
    no_import tells cygdb whether it should import debug information
    """
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("--gdb-executable",
                      dest="gdb", default='gdb',
                      help="gdb executable to use [default: gdb]")
    parser.add_option("--verbose", "-v",
                      dest="verbosity", action="count", default=0,
                      help="Verbose mode. Multiple -v options increase the verbosity")

    (options, args) = parser.parse_args()
    if path_to_debug_info is None:
        if len(args) > 1:
            path_to_debug_info = args[0]
        else:
            path_to_debug_info = os.curdir

    if gdb_argv is None:
        gdb_argv = args[1:]

    if path_to_debug_info == '--':
        no_import = True

    logging_level = logging.WARN
    if options.verbosity == 1:
        logging_level = logging.INFO
    if options.verbosity >= 2:
        logging_level = logging.DEBUG

    logging.basicConfig(level=logging_level)
    logger.info("verbosity = %r", options.verbosity)
    logger.debug("options = %r; args = %r", options, args)
    logger.debug("Done parsing command-line options. path_to_debug_info = %r, gdb_argv = %r",
                 path_to_debug_info, gdb_argv)

    tempfilename = make_command_file(path_to_debug_info, no_import=no_import)
    logger.info("Launching %s with command file: %s and gdb_argv: %s",
                options.gdb, tempfilename, gdb_argv)

    with open(tempfilename) as tempfile:
        logger.debug('Command file (%s) contains: """\n%s"""',
                     tempfilename, tempfile.read())
        logger.info("Spawning %s...", options.gdb)
        p = subprocess.Popen([options.gdb, '-command', tempfilename] + gdb_argv)
        logger.info("Spawned %s (pid %d)", options.gdb, p.pid)
        while True:
            try:
                logger.debug("Waiting for gdb (pid %d) to exit...", p.pid)
                ret = p.wait()
                logger.debug("Wait for gdb (pid %d) to exit is done. Returned: %r",
                             p.pid, ret)
            except KeyboardInterrupt:
                pass
            else:
                break
        logger.debug("Closing temp command file with fd: %s", tempfile.fileno())
    logger.debug("Removing temp command file: %s", tempfilename)
    os.remove(tempfilename)
    logger.debug("Removed temp command file: %s", tempfilename)
if trace_user and trace_kernel:
    tempfile.write("collect regs->cs\n")
    tempfile.write("collect $current_task_pid\n")
tempfile.write("end\n")
tempfile.write("trace smp_apic_timer_interrupt\n")
tempfile.write("condition $bpnum " + pid_str + cond_str + "\n")
tempfile.write("commands\n")
tempfile.write("collect regs->ip\n")
if trace_user and trace_kernel:
    tempfile.write("collect regs->cs\n")
    tempfile.write("collect $current_task_pid\n")
tempfile.write("end\n")
tempfile.close()

tempfile = open(tempfilename, "r")
print "Tracepoint command:"
print tempfile.read()
tempfile.close()

gdb.execute("source " + tempfilename, True, False)
os.remove(tempfilename)
gdb.execute("set disconnected-tracing on", True, False)
gdb.execute("tstart")
gdb.execute("kill", True, False)

signal.signal(signal.SIGINT, sigint_handler)
signal.siginterrupt(signal.SIGINT, False)

#Connect to pipe
gdb.execute("target tfile /sys/kernel/debug/gtpframe_pipe")

#--------------------------------------------------------------------------------------------------
#cycle
# tempfile module
import tempfile

# create a temp file
tempfile = tempfile.TemporaryFile()

# write to a temp file
tempfile.write(b'Save this special number for me: 01722750669')
tempfile.seek(0)

# read the temp file
print(tempfile.read())
tempfile.close()
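A small variant of the snippet above that avoids rebinding the name tempfile (which shadows the module after the first assignment) and lets a context manager close the file:

import tempfile

with tempfile.TemporaryFile() as tmp:
    tmp.write(b'Save this special number for me: 01722750669')
    tmp.seek(0)  # rewind before reading
    print(tmp.read())
# the temporary file is closed and removed automatically when the with-block exits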
def pack(self):
    """
    Packs the fasta and sqlite databases to remove redundancy and merge sqlite columns
    Returns:
        void
    """
    # First we handle the fastq file
    unique_hash = set()
    redundant_file = fasta.file(self.database_fasta_file)
    temp = gzip.open(self.database_temp_file, 'wb')
    for record in redundant_file.read():
        if not record['header'] in unique_hash:
            unique_hash.add(record['header'])
            temp.write('>' + record['header'] + os.linesep + record['sequence'] + os.linesep)
    os.rename(self.database_temp_file, self.database_fasta_file)

    # Now the sqlite (http://stackoverflow.com/a/10856450)
    from StringIO import StringIO
    tempfile = StringIO()
    for line in self.sq3_connection.iterdump():
        tempfile.write('%s\n' % line)
    tempfile.seek(0)

    sq3_temp_connection = sql.connect(self.database_temp_file)
    sq3_temp_cursor = sq3_temp_connection.cursor()
    sq3_temp_cursor.execute("CREATE TABLE IF NOT EXISTS genes ( sequence_hash TEXT, genus TEXT, species TEXT, NCBItaxID TEXT, kegg_ontology TEXT , kegg_reaction TEXT , go_term TEXT, kegg_map TEXT , sequence TEXT)")
    sq3_temp_connection.commit()

    sq3_memory_connection = sql.connect(":memory:")
    sq3_memory_cursor = sq3_memory_connection.cursor()
    sq3_memory_cursor.executescript(tempfile.read())
    sq3_memory_connection.commit()
    sq3_memory_connection.row_factory = sql.Row

    for h in unique_hash:
        sq3_memory_cursor.execute('SELECT * FROM genes WHERE `sequence_hash` = \'%s\'' % h)
        rows = sq3_memory_cursor.fetchall()
        rows_selected = len(rows)
        columns = tuple([c[0] for c in sq3_memory_cursor.description])
        merge_dict = dict.fromkeys(columns)
        for r in rows:
            r = [str(x) if x else None for x in r]
            incoming = dict(zip(columns, r))
            merge_dict = merge_insert_dicts(merge_dict, incoming)
        merge_dict = {i: j for i, j in merge_dict.items() if j != []}
        insert = 'INSERT INTO genes({}) VALUES ({})'.format(
            ', '.join(merge_dict.keys()), ', '.join('?' * len(merge_dict)))
        try:
            sq3_temp_cursor.execute(insert, merge_dict.values())
        except sql.Error as e:
            print merge_dict
            logger.warn(e)
            raise
    sq3_temp_connection.commit()
    sq3_temp_connection.close()