def edit(D, text):
    # Write *text* to a temporary file, open it in the editor wrapped by D,
    # and return the edited contents (or _A when nothing was saved).
    # NOTE(review): D appears to expose .extension, .edit_file() and
    # .require_save; _D is presumably '\n' and _A presumably None -- confirm
    # against the module that defines these constants.
    L = '\r\n'          # Windows line ending used while the file is edited
    K = 'utf-8-sig'     # BOM-prefixed UTF-8, used on Windows
    A = text
    import tempfile as H
    A = A or ''
    # E: caller passed raw bytes -- skip newline and encoding translation.
    E = type(A) in [bytes, bytearray]
    if not E and A and not A.endswith(_D):
        A += _D  # ensure the text ends with a newline before editing
    I, B = H.mkstemp(prefix='editor-', suffix=D.extension)
    try:
        if not E:
            if WIN:
                F = K
                A = A.replace(_D, L)  # translate to CRLF for Windows editors
            else:
                F = 'utf-8'
            A = A.encode(F)
        C = os.fdopen(I, 'wb')
        C.write(A)
        C.close()
        # Timestamp before editing; an unchanged mtime afterwards means the
        # user never saved.
        J = os.path.getmtime(B)
        D.edit_file(B)
        if D.require_save and os.path.getmtime(B) == J:
            return _A
        C = open(B, 'rb')
        try:
            G = C.read()
        finally:
            C.close()
        if E:
            return G
        else:
            # Undo the Windows translation: decode and restore plain newlines.
            return G.decode(K).replace(L, _D)
    finally:
        os.unlink(B)
def validate_key(self, public_key, jid):
    """Write *public_key* to a temporary file, scan it, and verify that
    it carries an ``xmpp:`` user id matching *jid*.

    Returns True when a matching userid is found, False otherwise.
    The temporary file is always removed before returning.
    """
    import tempfile
    temppath = os.path.join(tempfile.gettempdir(), 'temp_pubkey')
    # Distinct handle name: the original ``as tempfile`` shadowed the module.
    with open(temppath, 'wb') as keyfile:
        keyfile.write(public_key)
    result = self.scan_keys(temppath)
    if result:
        # Was previously never initialized -> NameError when no xmpp uid
        # was present in the key.
        key_found = False
        for uid in result.uids:
            if uid.startswith('xmpp:'):
                if uid[5:] == jid:
                    key_found = True
                else:
                    log.warning('Found wrong userid in key: %s != %s',
                                uid[5:], jid)
                    log.debug(result)
                    os.remove(temppath)
                    return False
        if not key_found:
            log.warning('No valid userid found in key')
            log.debug(result)
            os.remove(temppath)
            return False
        log.info('Key validation succesful')
        os.remove(temppath)
        return True
    # The %s placeholder previously had no argument supplied.
    log.warning('Invalid key data: %s', result)
    log.debug(result)
    os.remove(temppath)
    return False
def _save_entries(self):
    """Serialize the file entries to JSON and store them encrypted.

    The JSON is written to a temporary file, encrypted into the
    cryptstore, and the temporary file is removed afterwards.
    """
    # Build a JSON-serializable dictionary of all entries.
    store_dict = {}
    store_dict["max_id"] = self._max_id
    entry_list = []
    for entry in self._entries:
        entry_dict = {}
        entry_dict["filepath"] = entry.get_filepath()
        entry_dict["timestamp"] = entry.get_timestamp()
        entry_dict["state"] = entry.get_state()
        entry_dict["entry_id"] = entry.get_entry_id()
        entry_list.append(entry_dict)
    store_dict["entries"] = entry_list
    line = json.dumps(store_dict)
    # Write JSON to a temporary file.  delete=False keeps the file on disk
    # after the handle closes so it can be encrypted below; the original
    # used only NamedTemporaryFile().name, which leaked the open handle and
    # raced with the file's deletion on close.
    tempname = None
    try:
        with NamedTemporaryFile(mode="w", delete=False) as temp:
            tempname = temp.name
            temp.write(line)
    except IOError:
        show_error_message("Unable to create temporary file %s." % tempname, True)
    # Copy the encrypted temporary file to the cryptstore.
    key = self.get_key()
    fname = "cryptbox.00000001"
    destpath = os.path.join(self._rootpath, fname)
    encrypt_file(tempname, destpath, key)
    # Delete the temporary file.
    try:
        os.remove(tempname)
    except OSError:
        show_error_message("Unable to remove temporary file %s." % tempname)
def Run(self, tree): self.CreateTemporaryFiles() tempfile = open(self.mFilenameTempInput, "w") tempfile.write( to_string(input_tree, branchlengths=self.mBranchLengths, support=self.mSupport)) tempfile.close() if self.mLogLevel >= 2: os.system("cat %s" % self.mFilenameTempInput) statement = string.join( (self.mExecutable, "-v", self.mFilenameTempInput), " ") s = subprocess.Popen(statement, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.mTempDirectory, close_fds=True) (out, err) = s.communicate() if s.returncode != 0: raise TreeGraphError, "Error in calculating svg file\n%s" % err d = open(self.mFilenameTempOutput).readlines() self.DeleteTemporaryFiles() return "".join(d)
def setUp(self):
    # create DeviceOwner to pass the setup_wizard middleware check
    DeviceOwner.objects.create(username='******', password=123)
    self.client = Client()
    self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
    self.extension = dict(file_formats.choices).get("pdf")
    self.filename = "{}.{}".format(self.hash, self.extension)
    self.title = "abc123!@#$%^&*();'[],./?><"
    self.contentnode = ContentNode(title=self.title)
    self.available = True
    self.preset = format_presets.DOCUMENT
    self.file = File(checksum=self.hash, extension=self.extension,
                     available=self.available, contentnode=self.contentnode,
                     preset=self.preset)
    self.path = get_content_storage_file_path(self.filename)
    path_dir = os.path.dirname(self.path)
    if not os.path.exists(path_dir):
        os.makedirs(path_dir)
    # Use a context manager and a name that does not shadow the stdlib
    # ``tempfile`` module (the original handle was called ``tempfile``).
    with open(self.path, "w") as storage_file:
        storage_file.write("test")
def setUp(self):
    provision_device()
    self.client = Client()
    self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
    self.extension = file_formats.PDF
    self.filename = "{}.{}".format(self.hash, self.extension)
    self.title = "abc123!@#$%^&*();'[],./?><"
    self.contentnode = ContentNode(title=self.title)
    self.available = True
    self.preset = format_presets.DOCUMENT
    self.local_file = LocalFile(id=self.hash, extension=self.extension,
                                available=self.available)
    self.file = File(local_file=self.local_file, available=self.available,
                     contentnode=self.contentnode, preset=self.preset)
    self.path = get_content_storage_file_path(self.filename)
    path_dir = os.path.dirname(self.path)
    if not os.path.exists(path_dir):
        os.makedirs(path_dir)
    # Use a context manager and a name that does not shadow the stdlib
    # ``tempfile`` module (the original handle was called ``tempfile``).
    with open(self.path, "w") as storage_file:
        storage_file.write("test")
def Run(self, tree): self.CreateTemporaryFiles() tempfile = open(self.mFilenameTempInput, "w") tempfile.write(to_string(input_tree, branchlengths=self.mBranchLengths, support=self.mSupport)) tempfile.close() if self.mLogLevel >= 2: os.system("cat %s" % self.mFilenameTempInput) statement = string.join((self.mExecutable, "-v", self.mFilenameTempInput), " ") s = subprocess.Popen( statement, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.mTempDirectory, close_fds=True, ) (out, err) = s.communicate() if s.returncode != 0: raise TreeGraphError, "Error in calculating svg file\n%s" % err d = open(self.mFilenameTempOutput).readlines() self.DeleteTemporaryFiles() return "".join(d)
def test_named_tempfile1():
    """The file must survive an explicit close() inside the context and
    be removed when the context exits."""
    name = None
    # Bind to ``tmp`` -- the original ``as tempfile`` shadowed the module.
    with named_tempfile() as tmp:
        name = tmp.name
        assert_true(os.path.isfile(name))
        tmp.write('hello'.encode('utf8'))
        tmp.close()
        # Closing inside the context must keep the file on disk.
        assert_true(os.path.isfile(name))
    # Leaving the context removes the file.
    assert_false(os.path.isfile(name))
def browse_disptrace(dt):
    """Render *dt* to a temporary HTML file and open it in a browser."""
    import tempfile, webbrowser, urllib, os
    html = dt.render()
    tempfiledes, temppath = tempfile.mkstemp(suffix='.html')
    # Distinct handle name: the original rebound ``tempfile`` and thereby
    # shadowed the module it had just used.
    htmlfile = os.fdopen(tempfiledes, "w")
    htmlfile.write(html)
    htmlfile.close()
    # NOTE(review): urllib.pathname2url is the Python 2 location; on
    # Python 3 this lives at urllib.request.pathname2url -- confirm.
    tempurl = "file://{}".format(urllib.pathname2url(temppath))
    webbrowser.get(None).open_new(tempurl)
def test_named_tempfile2():
    """delete=True must be ignored: the file survives close() inside the
    context and is removed only when the context exits."""
    name = None
    # The specification of delete=True should be ignored.
    # Bind to ``tmp`` -- the original ``as tempfile`` shadowed the module.
    with named_tempfile(delete=True) as tmp:
        name = tmp.name
        assert_true(os.path.isfile(name))
        tmp.write('hello'.encode('utf8'))
        tmp.close()
        assert_true(os.path.isfile(name))
    assert_false(os.path.isfile(name))
def setUpClass(cls):
    # Create a predictable 296.1 MB temporary file of repeating byte
    # triplets (29_997 bytes per chunk x 9_872 chunks).
    elements = [200, 50, 25] * 9999
    cls.temp_filename = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'data',
        "%s.bin" % uuid.uuid4())
    # ``with`` guarantees the handle is closed even on error; the original
    # local name ``tempfile`` also shadowed the stdlib module.
    with open(cls.temp_filename, 'wb') as outfile:
        for i in xrange(0, 9872):
            outfile.write(bytearray(elements))
def main():
    """Filter tree lines into a temporary file and emit the first
    *generation* of them.

    NOTE(review): reconstructed from broken code -- the original had a
    syntax error (``if "/" not in line and :``), referenced the undefined
    name ``genetation``, and called ``tempfile.write`` on the module
    itself (which has no write()).  Confirm intent against the caller.
    """
    import sys
    treefile = args.treefile
    generation = args.ganeration  # NOTE(review): argparse attr really spelled "ganeration"? confirm
    tempfile.tempdir = args.tmpdir
    with tempfile.TemporaryFile(mode='w+') as tmp:
        for line in treefile:
            if "/" not in line:  # TODO confirm the dropped second condition
                tmp.write(line)
        tmp.seek(0)
        for g, line in enumerate(tmp):
            if g <= generation:
                sys.stdout.write(line)  # TODO confirm intended destination
def ansible_save(hosts, content, dest_filepath):
    """Copy *content* to *dest_filepath* on *hosts* via the ansible
    ``copy`` module, staging the data in a local temporary file."""
    # Distinct handle name: ``as tempfile`` shadowed the stdlib module.
    with NamedTemporaryFile() as source:
        source.write(content)
        source.flush()  # make the bytes visible before ansible reads the file
        module_name = 'copy'
        module_args = 'src={src} dest={conf_file}'.format(
            src=source.name,
            conf_file=dest_filepath,
        )
        results = run(hosts, module_name, module_args, )
    return results
def write_element_tempfile(element, tempfile): """ Writes element to temp file """ if element != None and ElementTree.iselement(element): try: tempfile.write(ElementTree.tostring(element)) except Exception, e: raise exceptions.ConeException( 'Cannot write Element to file (%s). Exception: %s' % (tempfile, e))
def test_invalid_db_file(self):
    invalid_sql_file = os.path.join(self.data_dir, 'invalid_db_file.db')
    # Distinct handle name: ``as tempfile`` shadowed the stdlib module.
    with open(invalid_sql_file, 'w') as dbfile:
        dbfile.write(u'[invalid data]')
    invalid_notary = sign.NotebookNotary(
        db_file=invalid_sql_file,
        secret=b'secret',
    )
    invalid_notary.sign(self.nb)
    # The corrupt db must be replaced and the original kept as a .bak copy.
    testpath.assert_isfile(os.path.join(self.data_dir, invalid_sql_file))
    testpath.assert_isfile(os.path.join(self.data_dir, invalid_sql_file + '.bak'))
def _load_db_to_memory(in_db_path): # Read database to tempfile conn = sqlite3.connect('file:%s?mode=ro' % in_db_path, uri=True) tempfile = io.StringIO() for line in conn.iterdump(): tempfile.write('%s\n' % line) conn.close() tempfile.seek(0) # Create a database in memory and import from tempfile conn = sqlite3.connect(":memory:") conn.cursor().executescript(tempfile.read()) return conn
def test_invalid_db_file(self):
    invalid_sql_file = os.path.join(self.data_dir, 'invalid_db_file.db')
    # Distinct handle name: ``as tempfile`` shadowed the stdlib module.
    with open(invalid_sql_file, 'w') as dbfile:
        dbfile.write(u'[invalid data]')
    invalid_notary = sign.NotebookNotary(
        db_file=invalid_sql_file,
        secret=b'secret',
    )
    invalid_notary.sign(self.nb)
    # The corrupt db must be replaced and the original kept as a .bak copy.
    testpath.assert_isfile(os.path.join(self.data_dir, invalid_sql_file))
    testpath.assert_isfile(
        os.path.join(self.data_dir, invalid_sql_file + '.bak'))
def edit_in_vim(content):
    '''
    Takes text, writes to a temp file, opens the temp file with vim,
    you edit file in vim and save. Returns the saved text.
    '''
    import subprocess
    import tempfile
    # Distinct handle name: ``as tempfile`` shadowed the module imported
    # one line above.
    with tempfile.NamedTemporaryFile(suffix='blogpost') as tmp:
        tmp.write(content.encode('utf-8'))
        tmp.flush()
        subprocess.call(['vim', tmp.name])
        # Close the read handle deterministically (was left open before).
        saved = open(tmp.name, 'r')
        try:
            text = saved.read().decode('utf-8')
        finally:
            saved.close()
    return text
def test_invalid_db_file(self):
    invalid_sql_file = os.path.join(self.data_dir, "invalid_db_file.db")
    # Distinct handle name: ``as tempfile`` shadowed the stdlib module.
    with open(invalid_sql_file, "w") as dbfile:
        dbfile.write("[invalid data]")
    invalid_notary = sign.NotebookNotary(
        db_file=invalid_sql_file,
        secret=b"secret",
    )
    invalid_notary.sign(self.nb)
    invalid_notary.store.close()
    # The corrupt db must be replaced and the original kept as a .bak copy.
    testpath.assert_isfile(os.path.join(self.data_dir, invalid_sql_file))
    testpath.assert_isfile(
        os.path.join(self.data_dir, invalid_sql_file + ".bak"))
def ansible_save(hosts, content, dest_filepath):
    """Copy *content* to *dest_filepath* on *hosts* via the ansible
    ``copy`` module, staging the data in a local temporary file."""
    # Distinct handle name: ``as tempfile`` shadowed the stdlib module.
    with NamedTemporaryFile() as source:
        source.write(content)
        source.flush()  # make the bytes visible before ansible reads the file
        module_name = 'copy'
        module_args = 'src={src} dest={conf_file}'.format(
            src=source.name,
            conf_file=dest_filepath,
        )
        results = run(
            hosts,
            module_name,
            module_args,
        )
    return results
def message_open(self, muuid, online=False):
    """Extracts the HTML Site to a Temp File and Shows it in Webbrowser.
    The Message is set read. Temp Files are deleted in __del__ function.
    """
    self.log.info('Open: %s' % muuid)
    if online:
        url = self.message_get_meta(muuid)['url']
        webbrowser.open_new_tab(url)
    else:
        mime = self.message_get_meta(muuid)['mimetype']
        extension = self.mimetypes.get_extension(mime)
        name = str(uuid.uuid4()) + extension[0]
        path = os.path.join(self.tempdir, name)
        # Distinct handle name: the original ``tempfile`` shadowed the module.
        outfile = open(path, 'wb')
        if extension[0] == '.html':
            meta = self.message_get_meta(muuid)
            # Encode exactly once -- the original called .encode() twice,
            # double-encoding the payload.
            data = self.message_get_data(muuid).encode(meta['encoding'])
            outfile.write(data)
            outfile.close()
            webbrowser.open_new_tab(path)
        else:
            data = self.message_get_data(muuid)
            outfile.write(data)
            outfile.close()
            # Hand the file to the platform's default opener.
            if sys.platform.startswith('darwin'):
                subprocess.call(('open', path))
            elif os.name == 'nt':
                os.startfile(path)
            elif os.name == 'posix':
                subprocess.call(('xdg-open', path))
    self.message_set_meta(muuid, 'read', True)
def setUp(self):
    # create DeviceOwner to pass the setup_wizard middleware check
    DeviceOwner.objects.create(username='******', password=123)
    self.client = Client()
    self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
    self.extension = file_formats.PDF
    self.filename = "{}.{}".format(self.hash, self.extension)
    self.title = "abc123!@#$%^&*();'[],./?><"
    self.contentnode = ContentNode(title=self.title)
    self.available = True
    self.preset = format_presets.DOCUMENT
    self.file = File(checksum=self.hash, extension=self.extension,
                     available=self.available, contentnode=self.contentnode,
                     preset=self.preset)
    self.path = get_content_storage_file_path(self.filename)
    path_dir = os.path.dirname(self.path)
    if not os.path.exists(path_dir):
        os.makedirs(path_dir)
    # Use a context manager and a name that does not shadow the stdlib
    # ``tempfile`` module (the original handle was called ``tempfile``).
    with open(self.path, "w") as storage_file:
        storage_file.write("test")
def _save_entries(self):
    """Serialize the file entries to JSON and store them encrypted.

    The JSON is written to a temporary file, encrypted into the
    cryptstore, and the temporary file is removed afterwards.
    """
    # Build a JSON-serializable dictionary of all entries.
    store_dict = {}
    store_dict["max_id"] = self._max_id
    entry_list = []
    for entry in self._entries:
        entry_dict = {}
        entry_dict["filepath"] = entry.get_filepath()
        entry_dict["timestamp"] = entry.get_timestamp()
        entry_dict["state"] = entry.get_state()
        entry_dict["entry_id"] = entry.get_entry_id()
        entry_list.append(entry_dict)
    store_dict["entries"] = entry_list
    line = json.dumps(store_dict)
    # Write JSON to a temporary file.  delete=False keeps the file on disk
    # after the handle closes so it can be encrypted below; the original
    # used only NamedTemporaryFile().name, which leaked the open handle and
    # raced with the file's deletion on close.
    tempname = None
    try:
        with NamedTemporaryFile(mode="w", delete=False) as temp:
            tempname = temp.name
            temp.write(line)
    except IOError:
        show_error_message(
            "Unable to create temporary file %s." % tempname, True)
    # Copy the encrypted temporary file to the cryptstore.
    key = self.get_key()
    fname = "cryptbox.00000001"
    destpath = os.path.join(self._rootpath, fname)
    encrypt_file(tempname, destpath, key)
    # Delete the temporary file.
    try:
        os.remove(tempname)
    except OSError:
        show_error_message("Unable to remove temporary file %s." % tempname)
def fetch_images(etree):
    """Download every <img> referenced by *etree* into a temporary
    directory and yield an OrderedDict mapping local filename -> node."""
    with pushd_temp_dir():
        filename_to_node = collections.OrderedDict()
        img_nodes = etree.findall(".//img")
        base_node = etree.find(".//base")
        try:
            base_url = base_node.get('href')
        except Exception:
            base_url = ""
        for node in img_nodes:
            # The original tested ``base_url is "" or None``, which parses
            # as ``(base_url is "") or None`` -- an identity comparison
            # plus a dead operand.  Truthiness is the intended check.
            if not base_url:
                img_url = node.get('src')
            else:
                img_url = base_url + '/' + node.get('src')
            website = urllib.request.Request(img_url)
            website.add_header(
                'User-Agent',
                'PurdueUniversityClassProject/1.0 ([email protected] https://goo.gl/dk8u5S)'
            )
            img = urllib.request.urlopen(website)
            # Renamed from ``type``/``tempfile`` -- those shadowed builtins
            # and the stdlib module.
            content_type = img.info().get('Content-Type')
            ext = content_type.split('/')[1]
            filename = make_filename(img_url, ext)
            with open(filename, 'wb') as imgfile:
                imgfile.write(img.read())
            if ext in ['gif', 'GIF']:
                ext = 'jpg'
                new_filename = make_filename(img_url, ext)
                Image.open(filename).convert('RGB').save(new_filename)
                filename = new_filename
                # NOTE(review): converted GIFs are never added to
                # filename_to_node -- confirm whether that is intended.
            elif ext in ['jpg', 'jpeg', 'png', 'jpe', '.jp2']:
                filename_to_node[filename] = node
        yield filename_to_node
def s3_upload(hashkey, size=ORIGINAL_SIZE):
    """
    Upload a screenshot PNG file to Amazon S3.
    This uses httplib directly and transfers the file in small chunks,
    so we don't have to load the whole PNG file into RAM.
    """
    from shotserver04.screenshots import s3
    aws = s3.AWSAuthConnection(settings.AWS_ACCESS_KEY_ID,
                               settings.AWS_SECRET_ACCESS_KEY,
                               is_secure=False)
    s3_bucket = settings.S3_BUCKETS[str(size)]
    s3_key = hashkey + '.png'
    server = s3.DEFAULT_HOST
    method = 'PUT'
    path = '/%s/%s' % (s3_bucket, urllib.quote_plus(s3_key))
    filename = png_filename(hashkey, size)
    f = file(filename, 'rb')
    # Determine the file size by seeking to the end, then rewind.
    f.seek(0, 2)  # os.SEEK_END for Python < 2.5
    bytes_total = f.tell()
    f.seek(0, 0)  # os.SEEK_SET for Python < 2.5
    headers = {
        'User-Agent': 'shotserver/0.4',
        'Host': server,
        'x-amz-acl': 'public-read',
        'Content-Type': 'image/png',
        'Content-Length': str(bytes_total),
    }
    query_args = {}
    # Sign the request; this adds the Authorization header in place.
    aws._add_aws_auth_header(headers, method, s3_bucket, s3_key, query_args)
    host = '%s:%d' % (server, 80)
    conn = httplib.HTTPConnection(host)
    conn.putrequest(method, path)
    for header_key, header_value in headers.iteritems():
        conn.putheader(header_key, header_value)
    conn.endheaders()
    # Stream the PNG in BUFFER_SIZE chunks to keep memory usage flat.
    bytes_sent = 0
    while True:
        bytes = f.read(BUFFER_SIZE)
        if not bytes:
            break
        conn.send(bytes)
        bytes_sent += len(bytes)
        # print 'sent', bytes_sent, 'of', bytes_total, 'bytes',
        # print '(%.1f%%)' % (100.0 * bytes_sent / bytes_total)
    assert bytes_sent == bytes_total
    f.close()
    response = conn.getresponse()
    if response.status != 200:
        raise Fault(response.status, response.read())
    # print 'http://%s/%s' % (s3_bucket, s3_key)
    # Write response from S3 to tempfile for debugging
    # NOTE(review): the local name ``tempfile`` shadows the stdlib module
    # from here on (debug-only branch).
    if DEBUG_HEADERS and str(size) == '160':
        tempfile = file('/tmp/%s.txt' % hashkey, 'w')
        tempfile.write('==== Request headers ====\n')
        tempfile.write('%s %s HTTP/1.1\n' % (method, path))
        for header, value in headers.iteritems():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write('==== Response headers ====\n')
        tempfile.write('HTTP/1.1 %s %s\n' %
                       (response.status, response.reason))
        for header, value in response.getheaders():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write(response.read())
        # tempfile.write(response.msg)
        tempfile.close()
    conn.close()
# Build a gdb tracepoint script in a temporary file (interactive branch).
if s[0:1] == 'y' or s[0:1] == 'Y':
    getmod()
cpu_number = int(gdb.parse_and_eval("$cpu_number"))
tempfilename = tempfile.mktemp()
# NOTE(review): this rebinding shadows the stdlib ``tempfile`` module for
# the rest of the script; kept because later (unseen) code writes through
# this name.
tempfile = open(tempfilename, "w")
if no_task:
    # Was missing: ``+=`` below raised NameError without an initial value
    # (the sibling variant of this script initializes it).
    ignore_str = ""
    #Setup first tracepoint
    ignore_str += get_ignore_str("arch_local_irq_enable")
    ignore_str += get_ignore_str("intel_idle")
    # GDB have bug with long conditon so close them
    #ignore_str += get_ignore_str("__do_softirq")
    #ignore_str += get_ignore_str("_raw_spin_unlock_irqrestore")
    # One pair of trace state variables per CPU.
    for i in range(0, cpu_number):
        tempfile.write("tvariable $pc_ip"+str(i)+"\n")
        tempfile.write("tvariable $pc_cs"+str(i)+"\n")
    tempfile.write("trace handle_irq\n")
    tempfile.write("commands\n")
    tempfile.write("teval $pc_ip0=(u64)regs->ip\n")
    tempfile.write("teval $pc_cs0=(u64)regs->cs\n")
    tempfile.write("end\n")
    #Setup second tracepoint
    tempfile.write("trace handle_irq\n")
    # Collect only when the saved cs indicates kernel mode.
    cond_str = " (($pc_cs0 & 3) == 0)"
    tempfile.write("condition $bpnum "+cond_str+ignore_str+"\n")
    tempfile.write("commands\n")
    tempfile.write("collect $no_self_trace\n")
    tempfile.write("collect $pc_ip0\n")
else:
    tempfile.write("trace handle_irq\n")
import tempfile


def find_gitroot(filepath_reference):
    """Walk up from *filepath_reference* until a .git directory is found;
    returns the last path examined when none exists."""
    path = filepath_reference
    path_prev = ""
    while not os.path.exists(os.path.join(path, ".git")) and path != path_prev:
        path_prev = path
        path = os.path.dirname(path)
    return path


doxyfile, sourcefile = sys.argv[-2:]
doxyfile = os.path.join(find_gitroot(sourcefile), doxyfile)
os.chdir(os.path.dirname(doxyfile))
# Distinct handle name: the original rebound ``tempfile`` over the module
# it had just imported.
doxy_handle = tempfile.NamedTemporaryFile(mode='w+b')
doxyfile_tmp = doxy_handle.name
# Close the source explicitly (was left to the garbage collector before).
with open(doxyfile, "r+b") as src:
    doxy_handle.write(src.read())
doxy_handle.write(b'\n\n')
doxy_handle.write(b'INPUT=' + os.fsencode(sourcefile) + b'\n')
doxy_handle.flush()
subprocess.call(("doxygen", doxyfile_tmp))
del doxy_handle  # Maybe handy, but also annoying?

if "--browse" in sys.argv:
    import webbrowser
    webbrowser.open("html/files.html")
def pack(self):
    """
    Packs the fasta and sqlite databases to remove redundancy and merge sqlite columns
    Returns: void
    """
    # First we handle the fastq file
    # Deduplicate records by header while rewriting the fasta into a
    # gzipped temp file, then atomically replace the original.
    unique_hash = set()
    redundant_file = fasta.file(self.database_fasta_file)
    temp = gzip.open(self.database_temp_file, 'wb')
    for record in redundant_file.read():
        if not record['header'] in unique_hash:
            unique_hash.add(record['header'])
            temp.write('>'+record['header']+os.linesep+record['sequence']+os.linesep)
    os.rename(self.database_temp_file, self.database_fasta_file)
    # Now the sqlite (http://stackoverflow.com/a/10856450)
    from StringIO import StringIO
    # NOTE(review): the local name ``tempfile`` shadows the stdlib module
    # for the rest of this method.
    tempfile = StringIO()
    # Dump the current connection as SQL text so it can be replayed into
    # an in-memory copy below.
    for line in self.sq3_connection.iterdump():
        tempfile.write('%s\n' % line)
    tempfile.seek(0)
    sq3_temp_connection = sql.connect(self.database_temp_file)
    sq3_temp_cursor = sq3_temp_connection.cursor()
    sq3_temp_cursor.execute("CREATE TABLE IF NOT EXISTS genes ( sequence_hash TEXT, genus TEXT, species TEXT, NCBItaxID TEXT, kegg_ontology TEXT , kegg_reaction TEXT , go_term TEXT, kegg_map TEXT , sequence TEXT)")
    sq3_temp_connection.commit()
    # Replay the dump into memory; all merging happens on the in-memory DB.
    sq3_memory_connection = sql.connect(":memory:")
    sq3_memory_cursor = sq3_memory_connection.cursor()
    sq3_memory_cursor.executescript(tempfile.read())
    sq3_memory_connection.commit()
    sq3_memory_connection.row_factory = sql.Row
    for h in unique_hash:
        # Merge all rows sharing a sequence_hash into a single record.
        sq3_memory_cursor.execute('SELECT * FROM genes WHERE `sequence_hash` = \'%s\'' % h)
        rows = sq3_memory_cursor.fetchall()
        rows_selected = len(rows)
        columns = tuple([c[0] for c in sq3_memory_cursor.description])
        merge_dict = dict.fromkeys(columns)
        for r in rows:
            r = [str(x) if x else None for x in r]
            incoming = dict(zip(columns, r))
            merge_dict = merge_insert_dicts(merge_dict, incoming)
        # Drop columns that merged to an empty list before inserting.
        merge_dict = {i: j for i, j in merge_dict.items() if j != []}
        insert = 'INSERT INTO genes({}) VALUES ({})'.format(
            ', '.join(merge_dict.keys()),
            ', '.join('?' * len(merge_dict)))
        try:
            sq3_temp_cursor.execute(insert, merge_dict.values())
        except sql.Error as e:
            print merge_dict
            logger.warn(e)
            raise
    sq3_temp_connection.commit()
    sq3_temp_connection.close()
# Build a gdb tracepoint script in a temporary file (interactive branch).
if s[0:1] == 'y' or s[0:1] == 'Y':
    getmod()
# Number of CPUs as reported by the gdb convenience variable.
cpu_number = int(gdb.parse_and_eval("$cpu_number"))
tempfilename = tempfile.mktemp()
# NOTE(review): this rebinding shadows the stdlib ``tempfile`` module for
# the remainder of the script -- confirm nothing below still needs it.
tempfile = open(tempfilename, "w")
if no_task:
    ignore_str = ""
    #Setup first tracepoint
    ignore_str += get_ignore_str("arch_local_irq_enable")
    ignore_str += get_ignore_str("intel_idle")
    # GDB have bug with long conditon so close them
    #ignore_str += get_ignore_str("__do_softirq")
    #ignore_str += get_ignore_str("_raw_spin_unlock_irqrestore")
    # Trace state variables recording the interrupted ip/cs.
    tempfile.write("tvariable $p_ip\n")
    tempfile.write("tvariable $p_cs\n")
    tempfile.write("trace handle_irq\n")
    tempfile.write("commands\n")
    tempfile.write("teval $p_ip=(u64)regs->ip\n")
    tempfile.write("teval $p_cs=(u64)regs->cs\n")
    tempfile.write("end\n")
    #Setup second tracepoint
    tempfile.write("trace handle_irq\n")
    # Collect only when the saved cs indicates kernel mode.
    cond_str = " (($p_cs & 3) == 0)"
    tempfile.write("condition $bpnum " + cond_str + ignore_str + "\n")
    tempfile.write("commands\n")
    #tempfile.write("collect $no_self_trace\n")
    tempfile.write("collect $p_ip\n")
    tempfile.write("end\n")
    tempfile.write("trace smp_apic_timer_interrupt\n")
def _download_and_unpack_file(url):
    """Downloads the database files created with setup-exfor-db.py as a
    tarball and unpacks them to the correct folder."""
    from tqdm import tqdm
    from glob import glob
    import requests
    import math
    import tarfile
    import tempfile
    import shutil
    # cleanup: remove previous database artifacts (dirs or plain files).
    for f in [fullIndexFileName, fullErrorFileName, fullCoupledFileName,
              fullMonitoredFileName, fullReactionCountFileName, fullDBPath,
              dbTagFile]:
        try:
            shutil.rmtree(f)
        except NotADirectoryError:
            os.remove(f)
        except FileNotFoundError:
            pass
    # Tag files:
    tag_files = [
        f for tag in ['X4-*', 'EXFOR-*']
        for f in glob(os.path.join(DATAPATH, tag))
    ]
    for tagfile in tag_files:
        try:
            os.remove(tagfile)
        except FileNotFoundError:
            pass
    # Streaming, so we can iterate over the response.
    r = requests.get(url, stream=True)
    tarname = os.path.basename(url)
    # Total size in bytes.
    total_size = int(r.headers.get('content-length', 0))
    block_size = 1024 * 1024
    wrote = 0
    # Distinct handle name: the original rebound ``tempfile`` over the
    # module it had just imported.
    archive = tempfile.TemporaryFile()
    print('Downloading data file', tarname)
    # Use true division: math.ceil(total // block) floored before ceiling,
    # so the progress total was off by one for non-exact sizes.
    for data in tqdm(r.iter_content(block_size),
                     total=math.ceil(total_size / block_size),
                     unit='MB', unit_scale=True):
        wrote = wrote + len(data)
        archive.write(data)
    if total_size != 0 and wrote != total_size:
        raise Exception("ERROR, something went wrong")
    archive.flush()
    archive.seek(0)
    print('Decompressing archive', tarname)
    with tarfile.open(fileobj=archive, mode='r') as _tar:
        members = _tar.getmembers()
        # (The original accumulated len(data) from the download loop here,
        # which measured nothing useful; tqdm tracks members directly.)
        for member in tqdm(members, total=len(members)):
            _tar.extract(member, DATAPATH)
    archive.close()
    with open(dbTagFile, 'wb') as f:
        # Creating the (empty) tag file marks the installed version.
        print('Installed database version', dbTagFile)
        pass
elif x[0] == '.keep': keep = True break elif x[0] == '.noforward': actions['forward'] = [] print 'Removing forwards...' continue elif x[0] == '.forward': actions['forward'] += [x[1]] print 'Adding forward:', x[1] continue elif x[0] == '.edit': default = ['base'] if len(x) == 1 else x[[1]:] print 'Entering editor...' with tempfile.NamedTemporaryFile(suffix=".tmp") as tempfile: tempfile.write(comment + '\n' + '-' * 10 + '\n' + '\n'.join(map(lambda x: responses[x], default))) tempfile.flush() call([EDITOR, tempfile.name]) tempfile.flush() tempfile.seek(0) contents = tempfile.readlines() print contents reply = ''.join(contents[contents.index('-'*10+'\r\n') + 1 :]) elif x[0] in responses: reply = responses[x[0]] break else: try: choice = int(x[0]) - 1 except: continue
#tempfile module
import tempfile

#create a temp file -- bind to ``tmp`` so the module name stays usable
#(the original rebound ``tempfile`` over the module itself)
tmp = tempfile.TemporaryFile()

#write to a temp file
tmp.write(b'Save this special nnumber for me: 01722750669')
tmp.seek(0)

#read the temp file
print(tmp.read())

tmp.close()
def find_gitroot(filepath_reference):
    """Walk up from *filepath_reference* until a .git directory is found;
    returns the last path examined when none exists."""
    path = filepath_reference
    path_prev = ""
    while not os.path.exists(os.path.join(path, ".git")) and path != path_prev:
        path_prev = path
        path = os.path.dirname(path)
    return path


doxyfile, sourcefile = sys.argv[-2:]
doxyfile = os.path.join(find_gitroot(sourcefile), doxyfile)
os.chdir(os.path.dirname(doxyfile))
# Distinct handle name: the original rebound ``tempfile`` over the module.
doxy_handle = tempfile.NamedTemporaryFile(mode='w+b')
doxyfile_tmp = doxy_handle.name
# Close the source explicitly (was left to the garbage collector before).
with open(doxyfile, "r+b") as src:
    doxy_handle.write(src.read())
doxy_handle.write(b'\n\n')
doxy_handle.write(b'INPUT=' + os.fsencode(sourcefile) + b'\n')
doxy_handle.flush()
subprocess.call(("doxygen", doxyfile_tmp))
del doxy_handle  # Maybe handy, but also annoying?

if "--browse" in sys.argv:
    import webbrowser
    webbrowser.open("html/files.html")
lower_case_char.append(32)
out = ""
for x, y in enumerate(tk_files):
    map_char = [0] * 256
    pass_flag = True
    # Populate character map with characters in a document
    with open(y, 'r') as f:
        # Iterates through each character of the file at hand
        for ch in iter(lambda: f.read(1), ''):
            # ACSII mapping of file characters
            map_char[ord(ch)] += 1
    # Iterate through character map
    out += "-" * 50 + "\n"
    out += y + ": Token Verification" + "\n"
    # Distinct loop names: the original inner loop reused ``x, y`` and
    # clobbered the outer loop's file index/name.
    for code, count in enumerate(map_char):
        if code not in lower_case_char and count != 0:
            pass_flag = False
            out += (str(count).ljust(5) + " - Occurrences of ASCII [" +
                    str(code) + "] -> [" + chr(code) + "]\n")
    out += "Test Results: PASS\n" if pass_flag else "Test Results: FAIL\n"
    out += ("-" * 50) + "\n"
EDITOR = os.environ.get('EDITOR', 'vim')
initial_message = "Hello world"
# mode="w+" so a str can be written (the binary default raised TypeError
# on Python 3); the handle is named ``report`` to avoid shadowing the
# stdlib tempfile module.
with tempfile.NamedTemporaryFile(mode="w+", suffix=".tmp") as report:
    report.write(out)
    report.flush()
    call([EDITOR, report.name])
# Build a gdb tracepoint script in a temporary file (interactive branch).
if s[0:1] == 'y' or s[0:1] == 'Y':
    getmod()
cpu_number = int(gdb.parse_and_eval("$cpu_number"))
tempfilename = tempfile.mktemp()
# NOTE(review): this rebinding shadows the stdlib ``tempfile`` module for
# the rest of the script; kept because later (unseen) code writes through
# this name.
tempfile = open(tempfilename, "w")
if no_task:
    # Was missing: ``+=`` below raised NameError without an initial value
    # (the sibling variant of this script initializes it).
    ignore_str = ""
    #Setup first tracepoint
    ignore_str += get_ignore_str("arch_local_irq_enable")
    ignore_str += get_ignore_str("intel_idle")
    # GDB have bug with long conditon so close them
    #ignore_str += get_ignore_str("__do_softirq")
    #ignore_str += get_ignore_str("_raw_spin_unlock_irqrestore")
    # One pair of trace state variables per CPU.
    for i in range(0, cpu_number):
        tempfile.write("tvariable $pc_ip" + str(i) + "\n")
        tempfile.write("tvariable $pc_cs" + str(i) + "\n")
    tempfile.write("trace handle_irq\n")
    tempfile.write("commands\n")
    tempfile.write("teval $pc_ip0=(u64)regs->ip\n")
    tempfile.write("teval $pc_cs0=(u64)regs->cs\n")
    tempfile.write("end\n")
    #Setup second tracepoint
    tempfile.write("trace handle_irq\n")
    # Collect only when the saved cs indicates kernel mode.
    cond_str = " (($pc_cs0 & 3) == 0)"
    tempfile.write("condition $bpnum " + cond_str + ignore_str + "\n")
    tempfile.write("commands\n")
    tempfile.write("collect $no_self_trace\n")
    tempfile.write("collect $pc_ip0\n")
else:
    tempfile.write("trace handle_irq\n")
def s3_upload(hashkey, size=ORIGINAL_SIZE):
    """
    Upload a screenshot PNG file to Amazon S3.
    This uses httplib directly and transfers the file in small chunks,
    so we don't have to load the whole PNG file into RAM.
    """
    from shotserver04.screenshots import s3
    aws = s3.AWSAuthConnection(
        settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY,
        is_secure=False)
    s3_bucket = settings.S3_BUCKETS[str(size)]
    s3_key = hashkey + '.png'
    server = s3.DEFAULT_HOST
    method = 'PUT'
    path = '/%s/%s' % (s3_bucket, urllib.quote_plus(s3_key))
    filename = png_filename(hashkey, size)
    f = file(filename, 'rb')
    # Determine the file size by seeking to the end, then rewind.
    f.seek(0, 2)  # os.SEEK_END for Python < 2.5
    bytes_total = f.tell()
    f.seek(0, 0)  # os.SEEK_SET for Python < 2.5
    headers = {
        'User-Agent': 'shotserver/0.4',
        'Host': server,
        'x-amz-acl': 'public-read',
        'Content-Type': 'image/png',
        'Content-Length': str(bytes_total),
    }
    query_args = {}
    # Sign the request; this adds the Authorization header in place.
    aws._add_aws_auth_header(headers, method, s3_bucket, s3_key, query_args)
    host = '%s:%d' % (server, 80)
    conn = httplib.HTTPConnection(host)
    conn.putrequest(method, path)
    for header_key, header_value in headers.iteritems():
        conn.putheader(header_key, header_value)
    conn.endheaders()
    # Stream the PNG in BUFFER_SIZE chunks to keep memory usage flat.
    bytes_sent = 0
    while True:
        bytes = f.read(BUFFER_SIZE)
        if not bytes:
            break
        conn.send(bytes)
        bytes_sent += len(bytes)
        # print 'sent', bytes_sent, 'of', bytes_total, 'bytes',
        # print '(%.1f%%)' % (100.0 * bytes_sent / bytes_total)
    assert bytes_sent == bytes_total
    f.close()
    response = conn.getresponse()
    if response.status != 200:
        raise Fault(response.status, response.read())
    # print 'http://%s/%s' % (s3_bucket, s3_key)
    # Write response from S3 to tempfile for debugging
    # NOTE(review): the local name ``tempfile`` shadows the stdlib module
    # from here on (debug-only branch).
    if DEBUG_HEADERS and str(size) == '160':
        tempfile = file('/tmp/%s.txt' % hashkey, 'w')
        tempfile.write('==== Request headers ====\n')
        tempfile.write('%s %s HTTP/1.1\n' % (method, path))
        for header, value in headers.iteritems():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write('==== Response headers ====\n')
        tempfile.write('HTTP/1.1 %s %s\n'
                       % (response.status, response.reason))
        for header, value in response.getheaders():
            tempfile.write('%s: %s\n' % (header, value))
        tempfile.write('\n')
        tempfile.write(response.read())
        # tempfile.write(response.msg)
        tempfile.close()
    conn.close()
import sys, tempfile, os from subprocess import call EDITOR = os.environ.get('EDITOR','vim') initial_message="INSERT INITIAL MESSAGE HERE" with tempfile.NamedTemporaryFile(suffix=".tmp") as tempfile: tempfile.write(initial_message) tempfile.flush() call([EDITOR, tempfile.name]) print("WE NOW PRINT THE CONTENTS OF THE TEMPORARY FILE") f=open(tempfile.name,'r') for line in f: print line.strip()
# Build a gdb tracepoint script in a temporary file (interactive branch).
if s[0:1] == 'y' or s[0:1] == 'Y':
    getmod()
# Number of CPUs as reported by the gdb convenience variable.
cpu_number = int(gdb.parse_and_eval("$cpu_number"))
tempfilename = tempfile.mktemp()
# NOTE(review): this rebinding shadows the stdlib ``tempfile`` module for
# the remainder of the script -- confirm nothing below still needs it.
tempfile = open(tempfilename, "w")
if no_task:
    ignore_str = ""
    #Setup first tracepoint
    ignore_str += get_ignore_str("arch_local_irq_enable")
    ignore_str += get_ignore_str("intel_idle")
    # GDB have bug with long conditon so close them
    #ignore_str += get_ignore_str("__do_softirq")
    #ignore_str += get_ignore_str("_raw_spin_unlock_irqrestore")
    # Trace state variables recording the interrupted ip/cs.
    tempfile.write("tvariable $p_ip\n")
    tempfile.write("tvariable $p_cs\n")
    tempfile.write("trace handle_irq\n")
    tempfile.write("commands\n")
    tempfile.write("teval $p_ip=(u64)regs->ip\n")
    tempfile.write("teval $p_cs=(u64)regs->cs\n")
    tempfile.write("end\n")
    #Setup second tracepoint
    tempfile.write("trace handle_irq\n")
    # Collect only when the saved cs indicates kernel mode.
    cond_str = " (($p_cs & 3) == 0)"
    tempfile.write("condition $bpnum "+cond_str+ignore_str+"\n")
    tempfile.write("commands\n")
    #tempfile.write("collect $no_self_trace\n")
    tempfile.write("collect $p_ip\n")
    tempfile.write("end\n")
    tempfile.write("trace smp_apic_timer_interrupt\n")
out = ""
for x, y in enumerate(tk_files):
    map_char = [0] * 256
    pass_flag = True
    # Populate character map with characters in a document
    with open(y, 'r') as f:
        # Iterates through each character of the file at hand
        for ch in iter(lambda: f.read(1), ''):
            # ACSII mapping of file characters
            map_char[ord(ch)] += 1
    # Iterate through character map
    out += "-" * 50 + "\n"
    out += y + ": Token Verification" + "\n"
    # Distinct loop names: the original inner loop reused ``x, y`` and
    # clobbered the outer loop's file index/name.
    for code, count in enumerate(map_char):
        if code not in lower_case_char and count != 0:
            pass_flag = False
            out += (str(count).ljust(5) + " - Occurrences of ASCII [" +
                    str(code) + "] -> [" + chr(code) + "]\n")
    out += "Test Results: PASS\n" if pass_flag else "Test Results: FAIL\n"
    out += ("-" * 50) + "\n"
EDITOR = os.environ.get('EDITOR', 'vim')
initial_message = "Hello world"
# mode="w+" so a str can be written (the binary default raised TypeError
# on Python 3); the handle is named ``report`` to avoid shadowing the
# stdlib tempfile module.
with tempfile.NamedTemporaryFile(mode="w+", suffix=".tmp") as report:
    report.write(out)
    report.flush()
    call([EDITOR, report.name])
# Freeze the master so its binlog coordinates stay valid while we dump.
cur.execute("FLUSH TABLES WITH READ LOCK")
# SHOW MASTER STATUS - Grab masterFile and masterPOS for use in file to be sent to the Slave
# SHOW Value without columns
showMaster = 'mysql -h '+ masterHost+ ' -P '+masterPort+' -u '+masterUser+ ' -p'+masterPasswd+' --skip-column-names -e'+'\'SHOW MASTER STATUS;\''
# Grab values for File and Position
p = os.popen(showMaster, 'r', 1)
str1 = p.read()
str2 = str1.split()
masterFile = str2[0]
masterPOS = str2[1]
f = tempfile.mktemp()
# NOTE(review): this rebinding shadows the stdlib ``tempfile`` module for
# the rest of the script; kept because later (unseen) code writes through
# this name.
tempfile = open(f, 'w')
tempfile.write('STOP SLAVE; \n')
# SQL Dump
takeaDUMP = 'mysqldump -h '+ str(masterHost)+ ' -P '+ str(masterPort) + ' --all-database --add-drop-table --add-drop-database ' + '-u ' + str(masterUser) + ' -p'+ masterPasswd
# Push DUMP into the file
p1 = os.popen(takeaDUMP, 'r', 1)
strDUMP = p1.read()
tempfile.write(strDUMP + '\n')
# Push Instructions for the slave
tempfile.write('CHANGE MASTER TO MASTER_HOST = ' + '\'' + str(masterHost) + '\',' + '\n')
tempfile.write('MASTER_PORT = ' + str(masterPort) + ',' + '\n')
# The next two writes were syntactically invalid in the original
# ("'MASTER_USER = '******'...'", apparently a redaction artifact);
# reconstructed as plain string concatenation -- confirm against the
# original script.
tempfile.write('MASTER_USER = ' + '\'' + str(masterUser) + '\'' + ',' + '\n')
tempfile.write('MASTER_PASSWORD = ' + '\'' + str(masterPasswd) + '\'' + ',' + '\n')
tempfile.write('MASTER_LOG_FILE = ' + '\'' + masterFile + '\'' + ',' + '\n')
def write_to_tempfile(tempfile_info, html):
    """Write *html* through the open file handle carried in
    *tempfile_info* (a (handle, name) pair) and close the handle."""
    out_handle = tempfile_info[0]
    out_name = tempfile_info[1]  # kept for parity with the original
    out_handle.write(html)
    out_handle.close()