def extend_chrome_params(parsed_args, params):
    """Append Cygwin/NaCl-specific flags to *params* and point the NaCl
    stdout/stderr/log streams at temp files via environment variables.

    Only sets each NACL_* variable if the caller has not already set it
    (``setdefault``), so user overrides win.
    """
    # On Cygwin with 64bit Windows, stdout and stderr for NaCl are not supported
    # yet. (crbug.com/171836) In the mean time, we use a workaround by
    # redirecting the stdout and stderr to each file, and reading it by
    # "tail -f". "--no-sandbox" is needed for the redirection.
    # Note: two alternatives that are not chosen:
    # 1) Use named pipe: On cygwin, named pipe seems not well supported,
    # unfortunately. For example, cygwin's named pipe is not accessible from
    # native windows environment. Also, it is not easy to create/read windows
    # native named pipe from cygwin.
    # 2) Observe files by, e.g., inotify family or FindFirstChangeNotification:
    # At the moment, there seems no big merit, and these need more complicated
    # build system for windows.
    params.append('--no-sandbox')

    # Set each temporary file path for the redirection (if necessary).
    # os.tempnam may be insecure in general, but on cygwin, tempfile module seems
    # to have some race condition, when it is used to communicate between a
    # program running on Cygwin and one running on Windows native environment.
    # NOTE(review): resolve_cygpath presumably converts a Cygwin path to a
    # Windows-native one for the NaCl runtime — confirm against its definition.
    os.environ.setdefault(
        'NACL_EXE_STDOUT',
        resolve_cygpath(os.tempnam(
            tempfile.gettempdir(), _DEFAULT_STDOUT_PREFIX)))
    os.environ.setdefault(
        'NACL_EXE_STDERR',
        resolve_cygpath(os.tempnam(
            tempfile.gettempdir(), _DEFAULT_STDERR_PREFIX)))
    os.environ.setdefault(
        'NACLLOG',
        resolve_cygpath(os.tempnam(
            tempfile.gettempdir(), _DEFAULT_NACLLOG_PREFIX)))
def test_save(self):
    """Verify SOL.save() works both with a path string and an open file
    object, producing the expected serialized bytes in each case."""
    s = sol.SOL('hello')
    s.update({'name': 'value', 'spam': 'eggs'})

    # Case 1: save to a filename.
    x = os.tempnam()
    s.save(x)
    try:
        self.assertEquals(open(x, 'rb').read(), HelperTestCase.contents)
    except:
        # Clean up the temp file before propagating the failure.
        if os.path.isfile(x):
            os.unlink(x)
        raise

    # Case 2: save to an already-open file object; it must be written to
    # but left open afterwards.
    x = os.tempnam()
    fp = open(x, 'wb+')
    self.assertEquals(fp.closed, False)
    s.save(fp)
    # Position advanced -> something was actually written.
    self.assertNotEquals(fp.tell(), 0)
    fp.seek(0)
    self.assertEquals(fp.read(), HelperTestCase.contents)
    self.assertEquals(fp.closed, False)
    try:
        self.assertEquals(open(x, 'rb').read(), HelperTestCase.contents)
    except:
        if os.path.isfile(x):
            os.unlink(x)
        raise
def generate_crontab(self, cr, user, ids, context=None):
    """Synchronize the user's crontab with the OpenERP command records.

    For each record id: dump the current crontab, strip any previous
    entry tagged "#Start:OE--><name>", re-append the entry if the record
    is active, then install the edited file with ``crontab``.

    NOTE(review): all subprocess calls interpolate record values into
    shell=True command strings — a record name containing shell
    metacharacters would be injected into sed/crontab. Verify inputs are
    trusted before relying on this.
    """
    #Get Command from database
    commands = self.get_command(cr, user, ids, context)
    for id in ids:
        working_path = commands[id].get('working_path', self._root)
        #Create temporary.
        tmpfn1 = os.tempnam(working_path, 'oe1')
        tmpfn2 = os.tempnam(working_path, 'oe2')
        #Extract Crontab to temporary file
        #Note,make sure you have permission to access directory and directory exists.
        p = subprocess.call(["crontab -l > " + tmpfn1],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            shell=True)
        #Search with "#Start:OE-->" + name crontrab and delete it.
        subprocess.call(["sed '/#Start:OE-->" +
                         (commands[id].get('name', False) or "") +
                         "/d' " + tmpfn1 + " > " + tmpfn2],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        shell=True)
        if commands[id].get('active', False):  #Active and state is done
            #Append new command into temporary file
            fo = open(tmpfn2, "a")
            fo.write(commands[id].get('schedule', "") + " " +
                     commands[id].get('command', "") + ">>" +
                     working_path + "/crontab_oe.log\n");
            fo.close()
        #Generate the Crontab from file.
        # Installed unconditionally so that deactivated entries (removed by
        # the sed pass above) actually disappear from the live crontab.
        p = subprocess.call(["crontab " + tmpfn2],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            shell=True)
        #Delete temporary file
        p = subprocess.call(["rm " + tmpfn1],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            shell=True)
        p = subprocess.call(["rm " + tmpfn2],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            shell=True)
    return True
def test_H_get_put(self):
    """
    verify that get/put work.
    """
    import os, warnings
    # os.tempnam emits a RuntimeWarning about insecurity; irrelevant here.
    warnings.filterwarnings('ignore', 'tempnam.*')

    # Round trip: write a local file, put it to the SFTP server, read it
    # back through the SFTP handle, then get it into a fresh local file.
    localname = os.tempnam()
    text = 'All I wanted was a plastic bunny rabbit.\n'
    f = open(localname, 'wb')
    f.write(text)
    f.close()
    sftp.put(localname, FOLDER + '/bunny.txt')

    f = sftp.open(FOLDER + '/bunny.txt', 'r')
    self.assertEquals(text, f.read(128))
    f.close()

    os.unlink(localname)
    localname = os.tempnam()
    sftp.get(FOLDER + '/bunny.txt', localname)
    f = open(localname, 'rb')
    self.assertEquals(text, f.read(128))
    f.close()

    os.unlink(localname)
    sftp.unlink(FOLDER + '/bunny.txt')
def test_H_get_put(self):
    """
    verify that get/put work.
    """
    # os.tempnam emits a RuntimeWarning about insecurity; irrelevant here.
    warnings.filterwarnings('ignore', 'tempnam.*')

    localname = os.tempnam()
    text = 'All I wanted was a plastic bunny rabbit.\n'
    f = open(localname, 'wb')
    f.write(text)
    f.close()
    saved_progress = []

    def progress_callback(x, y):
        # Record every (bytes transferred, total bytes) callback invocation.
        saved_progress.append((x, y))
    sftp.put(localname, FOLDER + '/bunny.txt', progress_callback)

    f = sftp.open(FOLDER + '/bunny.txt', 'r')
    self.assertEquals(text, f.read(128))
    f.close()
    # Final callback must report the full 41-byte payload transferred.
    self.assertEquals((41, 41), saved_progress[-1])

    os.unlink(localname)
    localname = os.tempnam()
    saved_progress = []
    sftp.get(FOLDER + '/bunny.txt', localname, progress_callback)
    f = open(localname, 'rb')
    self.assertEquals(text, f.read(128))
    f.close()
    self.assertEquals((41, 41), saved_progress[-1])

    os.unlink(localname)
    sftp.unlink(FOLDER + '/bunny.txt')
def pdf_merge(pdf1, pdf2):
    """Concatenate two PDF documents with pyPdf.

    :param pdf1: raw bytes of the first PDF
    :param pdf2: raw bytes of the second PDF
    :returns: raw bytes of the merged PDF (pages of pdf1 then pdf2)
    :raises Exception: if writing, parsing or merging fails; the original
        error text is preserved in the message.
    """
    tmp_paths = []

    def _mktemp():
        # tempfile.mkstemp creates the file atomically (O_EXCL), avoiding
        # the symlink race inherent in os.tempnam().
        fd, path = tempfile.mkstemp(suffix=".pdf")
        os.close(fd)
        tmp_paths.append(path)
        return path

    tmp1 = _mktemp()
    tmp2 = _mktemp()
    tmp3 = _mktemp()
    try:
        output = pyPdf.PdfFileWriter()
        # PDFs are binary; write in "wb" (the original used text mode).
        with open(tmp1, "wb") as f:
            f.write(pdf1)
        with open(tmp2, "wb") as f:
            f.write(pdf2)
        input1 = pyPdf.PdfFileReader(open(tmp1, "rb"))
        input2 = pyPdf.PdfFileReader(open(tmp2, "rb"))
        for page in range(input1.getNumPages()):
            output.addPage(input1.getPage(page))
        for page in range(input2.getNumPages()):
            output.addPage(input2.getPage(page))
        with open(tmp3, "wb") as outputStream:
            output.write(outputStream)
        with open(tmp3, "rb") as f:
            return f.read()
    except Exception as e:
        # Keep the underlying cause instead of discarding it, and never
        # swallow KeyboardInterrupt/SystemExit (no bare except).
        raise Exception("Failed to merge PDF files: %s" % e)
    finally:
        # Temp files are removed on success *and* failure (the original
        # leaked all three on any error).
        for path in tmp_paths:
            try:
                os.unlink(path)
            except OSError:
                pass
def test_tempnam(self):
    """Exercise os.tempnam: default dir, explicit dir, and prefix."""
    # tempnam does not exist on all platforms/builds; skip quietly.
    if not hasattr(os, "tempnam"):
        return
    # Suppress the security RuntimeWarning tempnam emits in this module.
    warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, "test_os")
    self.check_tempfile(os.tempnam())

    name = os.tempnam(TESTFN)
    self.check_tempfile(name)

    name = os.tempnam(TESTFN, "pfx")
    # The requested prefix must appear at the start of the basename.
    self.assert_(os.path.basename(name)[:3] == "pfx")
    self.check_tempfile(name)
def test_tempnam(self):
    """Exercise os.tempnam: default dir, explicit dir, and prefix.

    Runs inside catch_warnings so the filter changes do not leak into
    other tests.
    """
    with warnings.catch_warnings():
        # tempnam triggers both a RuntimeWarning (security) and a
        # DeprecationWarning; silence both for this test only.
        warnings.filterwarnings("ignore", "tempnam", RuntimeWarning,
                                r"test_os$")
        warnings.filterwarnings("ignore", "tempnam", DeprecationWarning)
        self.check_tempfile(os.tempnam())

        name = os.tempnam(test_support.TESTFN)
        self.check_tempfile(name)

        name = os.tempnam(test_support.TESTFN, "pfx")
        # The requested prefix must appear at the start of the basename.
        self.assertTrue(os.path.basename(name)[:3] == "pfx")
        self.check_tempfile(name)
def render(source=None, filename=None, **kw):
    """Render a template through phantomjs and post-process its output.

    Either *source* (template text, written to a temp file) or *filename*
    (existing template path) must be given. Remaining keyword args are
    serialized to a JS ``param`` variable and passed to phantom/render.js.

    :returns: phantomjs stdout with JS literals rewritten toward Python
        syntax (true/false -> True/False, [a, b] -> ('a', b)).
    """
    if filename is not None:
        tpl = filename
    elif source is not None:
        # Materialize the inline template to a temp file for phantomjs.
        tpl = os.tempnam(None, "phantom")
        tpl_f = open(tpl, 'w')
        tpl_f.write(source)
        tpl_f.close()
    for k, v in kw.items():
        # Generators are not JSON-serializable; drain them to lists first.
        if hasattr(v, 'next'):
            kw[k] = list(v)
    param = os.tempnam(None, "phantom")
    param += ".js"
    paramsdump = open(param, 'w')
    paramsdump.write("var param = ")
    simplejson.dump(kw, paramsdump)
    paramsdump.close()
    pipe = subprocess.Popen([
        "phantomjs",
        "phantom/render.js",
        tpl,
        param, ],
        stdout=subprocess.PIPE, )
    out, _err = pipe.communicate()
    os.unlink(param)
    # Only delete the template if we created it ourselves.
    if filename is None:
        os.unlink(tpl)
    rep = {
        "true": "True",
        "false": "False",
    }
    ret = out[:-1]  # drop trailing newline
    for k, v in rep.items():
        ret = ret.replace(k, v)
    re_fix = [
        (r"\[(\w+), (\w+)\]", r"('\1', \2)"),
    ]
    for pt, rp in re_fix:
        ret = re.sub(pt, rp, ret)
    return ret
def write_files(self, rawfile, output_dir,
                write_command_template=scripts.write_command,
                write_command_overrides=None):
    """Writes out UVFITs files.

    NB: You should use this rather than performing writes manually:
    ``reduce`` cannot handle long file paths, so rather than cripple the
    scripting functionality, this function hacks around the limitations.
    Kludgey but effective.
    """
    ensure_dir(output_dir)
    tgt_name = os.path.splitext(rawfile)[0]
    tgt_path = os.path.join(output_dir, tgt_name + '.fits')
    # A calibrator file is only produced when the raw file has one recorded.
    if self.files[rawfile][keys.calibrator] is not None:
        cal_basename = (tgt_name + '_cal_' +
                        self.files[rawfile][keys.calibrator] + '.fits')
        cal_path = os.path.join(output_dir, cal_basename)
    else:
        cal_path = None
    # Short temp names in working_dir keep the paths within reduce's limit;
    # silence os.tempnam's security RuntimeWarning while generating them.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        tgt_temp = os.tempnam(self.working_dir, 'ami_') + '.fits'
        cal_temp = os.tempnam(self.working_dir, 'ami_') + '.fits'
    if cal_path is None:
        output_paths_string = os.path.basename(tgt_temp)
    else:
        output_paths_string = " ".join((os.path.basename(tgt_temp),
                                        os.path.basename(cal_temp)))
    logger.debug("Writing to temp files %s" % output_paths_string)
    write_command_args = scripts.write_command_defaults.copy()
    if write_command_overrides is not None:
        write_command_args.update(write_command_overrides)
    write_command_args['output_paths'] = output_paths_string
    write_command = write_command_template.format(**write_command_args)
    self.run_command(write_command)
    # Move results from the short temp names to the real output paths.
    logger.debug("Renaming tempfile %s -> %s", tgt_temp, tgt_path)
    shutil.move(tgt_temp, tgt_path)
    info = self.files[self.active_file]
    info[keys.target_uvfits] = os.path.abspath(tgt_path)
    if cal_path is not None:
        logger.debug("Renaming tempfile %s -> %s", cal_temp, cal_path)
        shutil.move(cal_temp, cal_path)
        info[keys.cal_uvfits] = os.path.abspath(cal_path)
    logger.debug("Wrote target, calib. UVFITs to:\n\t%s\n\t%s",
                 tgt_path, cal_path)
def convMailPdfToTex(self, mail):
    """Convert a PDF mail attachment to LaTeX-includable form via inkscape.

    :param mail: an email message part; must be application/pdf
    :returns: (sanitized_name, path_to_converted_pdf) — the caller owns
        (and must delete) the returned temp file
    NOTE(review): the attachment filename is interpolated into an
    os.system() shell string below; a malicious filename could inject
    shell commands — confirm attachments are from a trusted source.
    """
    if mail.get_content_type() == "application/pdf":
        filen = mail.get_filename()
        # Replace all dots except the final extension dot, and strip
        # characters awkward in TeX \includegraphics paths.
        numdots = filen.count(".")
        newname = filen.replace(".", "-", numdots - 1).replace(" ", "_").replace(")", "").replace("(", "")
        tempnam = os.tempnam() + ".pdf"
        tempnam2 = os.tempnam() + ".pdf"
        with open(tempnam, "wb") as fp:
            fp.write(mail.get_payload(decode=True))
        # -D exports the drawing area; --export-latex splits text into a
        # companion .pdf_tex file.
        os.system("inkscape '%s' -z -D --export-latex -A '%s'" % (tempnam, tempnam2));
        os.remove(tempnam)
        return (newname, tempnam2)
    else:
        print "Incorrect file format"
def symlink(src, dst, tmp=None):
    """Point the symlink *dst* at *src*, atomically when *dst* already exists.

    Behaves like ``os.symlink(src, dst)`` except when *dst* is already a
    symlink: then a new link is created at a scratch location and
    ``rename()``d over *dst*, so there is never a moment when *dst* is
    missing. That matters when another process may dereference *dst* at any
    time (e.g. a symlink serving as a live DocumentRoot).

    Note the *call* itself is not atomic (it stats, links, renames); only
    the visibility of the switch is. The scratch name defaults to
    ``os.tempnam()``, which prints a security warning — the race it warns
    about is replaced here by a far less likely one — so callers who want
    silence can supply their own transient path via *tmp*.
    """
    if os.path.islink(dst):
        swap_path = tmp if tmp else os.tempnam(None, 'alns_')
        os.symlink(src, swap_path)
        # rename() atomically replaces dst with the freshly made link.
        os.rename(swap_path, dst)
        return None
    # dst absent (or a plain file/dir): plain symlink creation suffices.
    return os.symlink(src, dst)
def set_externals_for_path_expect_error(path, val):
    """Try to set *val* as the svn:externals property on *path*, expecting
    the svn client to fail (AnyOutput on stderr)."""
    # propset -F needs a file, so stage the value in a temp file.
    tmp_f = os.tempnam()
    svntest.main.file_append(tmp_f, val)
    svntest.actions.run_and_verify_svn(
        None, None, svntest.verify.AnyOutput,
        "pset", "-F", tmp_f, "svn:externals", path
    )
    os.remove(tmp_f)
def ridearea2(filename):
    #RideArea2 wrap filename, returning ridearea output name
    # Example invocation:
    # ridearea2.exe -l m -i D:\PC_Level3_dll.configured -o D:\Logs\TEST\z0.0.0.1\LegacyExploits\st_ep_egg.ra2
    # Output goes to a temp name in the current directory; the caller owns
    # (and must eventually delete) the returned file.
    output = os.tempnam('.', 'ridearea_')
    # check_output raises CalledProcessError if ridearea2.exe fails; its
    # stdout is echoed for diagnostics.
    print check_output(
        ['ridearea2.exe', '-l', 'm', '-i', filename, '-o', output])
    return output
def test_block_all(self):
    """Run seq_block_all.xml and verify the log records 0 PASS, 5 BLOCKED
    and 1 FAIL results."""
    log_file = os.tempnam(None, "stasis_")
    print "LOG FILE: %s" % log_file
    case_lib = stasis.TestCaseLibrary(self.cv)
    sequence_lib = stasis.TestSequenceLibrary(case_lib)
    series = sequence_lib.load(None, "seq_block_all.xml")
    log_reporter = stasis.LogTestCaseReporter(debug=TestSequence.debug,
                                              logfile=log_file)
    console_reporter = stasis.ConsoleTestCaseReporter()
    series.addReporter(log_reporter)
    series.addReporter(console_reporter)
    dispatcher = stasis.RunDispatcher(series)
    dispatcher.run()
    # Count result markers in the emitted log to verify outcomes.
    fd = open(log_file, 'r')
    pass_count = 0
    blocked_count = 0
    failed_count = 0
    lines = fd.readlines()
    for line in lines:
        if re.search(" PASS ", line):
            pass_count += 1
            continue
        elif re.search(" BLOCKED ", line):
            blocked_count += 1
        elif re.search(" FAIL ", line):
            failed_count += 1
    self.assertEqual(pass_count, 0)
    self.assertEqual(blocked_count, 5)
    self.assertEqual(failed_count, 1)
def run(self):
    """Temporarily append a banner to /etc/rc.local via a hardlink, then
    restore the file after ``self.absolute_duration`` seconds.

    The hardlink lets us truncate back through the same inode even if the
    original path is replaced meanwhile. Any error during setup is logged
    and the restore phase is skipped (``else`` clause).
    """
    self.start()
    import os, time
    rclocal_path = self.resolve_link('/etc/rc.local')
    link_path = os.tempnam()
    try:
        os.link(rclocal_path, link_path)
        self.hec_logger('Created hardlink', link_path=link_path,
                        file_path=rclocal_path)
        with open(link_path, 'a+') as f:
            # Remember the original content length so we can truncate back
            # to it later; then append the banner.
            f.seek(0)
            data = f.read()
            f.write('\n# ' + self._banner + '\n')
            offset = f.tell()
        self.hec_logger('Added content to the file', file_path=rclocal_path)
    except Exception as e:
        self.hec_logger(str(e), severity='error')
    else:
        # Wait one hour before restoring file
        time.sleep(self.absolute_duration)
        with open(link_path, 'a+') as f:
            f.truncate(len(data))
        self.hec_logger('Restored contents of the file',
                        file_path=rclocal_path, orig_size=len(data),
                        dorked_size=offset)
        os.unlink(link_path)
        self.hec_logger('Removed hardlink', link_path=link_path)
    self.finish()
def download(request, id, template):
    """Django view: generate a PowerPoint export of a presentation and
    stream it back as an attachment.

    Redirects to ``next`` (or the browse page) when the presentation is
    not visible to this request. The temp file is always removed.
    """
    return_url = request.GET.get('next', reverse('presentation-browse'))
    presentation = Presentation.get_by_id_for_request(id, request)
    if not presentation:
        return HttpResponseRedirect(return_url)

    g = PowerPointGenerator(presentation, request.user)
    filename = os.tempnam()
    try:
        g.generate(template, filename)
        with open(filename, mode="rb") as f:
            response = HttpResponse(
                content=f.read(),
                mimetype=
                'application/vnd.openxmlformats-officedocument.presentationml.presentation'
            )
            response[
                'Content-Disposition'] = 'attachment; filename=%s.pptx' % presentation.name
            return response
    finally:
        # Best-effort cleanup: the temp file may not exist if generate()
        # failed before writing it.
        try:
            os.unlink(filename)
        except:
            pass
def test_copy_image_file_wrong_checksum(self):
    """_copy_image_file must raise PowerVMFileTransferFailed and remove the
    remote file when local and remote checksums disagree."""
    file_path = os.tempnam('/tmp', 'image')
    remote_path = '/mnt/openstack/images'
    exp_remote_path = os.path.join(remote_path,
                                   os.path.basename(file_path))
    # The adapter is expected to clean up the mismatched remote copy.
    exp_cmd = ' '.join(['/usr/bin/rm -f', exp_remote_path])

    # Deliberately different checksums to trigger the failure path.
    def fake_md5sum_remote_file(remote_path):
        return '3202937169'

    def fake_checksum_local_file(source_path):
        return '3229026618'

    fake_noop = lambda *args, **kwargs: None
    fake_op = self.powervm_adapter
    self.stubs.Set(fake_op, 'run_vios_command', fake_noop)
    self.stubs.Set(fake_op, '_md5sum_remote_file', fake_md5sum_remote_file)
    self.stubs.Set(fake_op, '_checksum_local_file',
                   fake_checksum_local_file)
    self.stubs.Set(common, 'ftp_put_command', fake_noop)

    self.mox.StubOutWithMock(self.powervm_adapter,
                             'run_vios_command_as_root')
    self.powervm_adapter.run_vios_command_as_root(exp_cmd).AndReturn([])

    self.mox.ReplayAll()

    self.assertRaises(exception.PowerVMFileTransferFailed,
                      self.powervm_adapter._copy_image_file,
                      file_path, remote_path)
def connect(self, primary=True):
    """Attach to the crawl process over a UNIX datagram socket.

    If the crawl-side socket path does not exist yet, reschedule this
    method on the IO loop one second later instead of failing. Otherwise
    create our own socket, bind it to a fresh temp path, register the
    read handler, and send the initial "attach" message.

    :param primary: forwarded in the attach message to the crawl process.
    """
    if not os.path.exists(self.crawl_socketpath):
        # Wait until the socket exists
        self.io_loop.add_timeout(time.time() + 1, self.connect)
        return

    self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    self.socket.settimeout(10)

    # Set close-on-exec.
    # BUG FIX: the original called fcntl(fd, flags | FD_CLOEXEC), passing
    # the new flag *value* where the fcntl *command* belongs — so FD_CLOEXEC
    # was never actually set. F_SETFD is required to write the flags back.
    flags = fcntl.fcntl(self.socket.fileno(), fcntl.F_GETFD)
    fcntl.fcntl(self.socket.fileno(), fcntl.F_SETFD,
                flags | fcntl.FD_CLOEXEC)

    self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    # Bind to a temp path
    # Ignore the security warning about tempnam; in this case,
    # there is no security risk (the most that can happen is that
    # the bind call fails)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        self.socketpath = os.tempnam(server_socket_path, "crawl")
    self.socket.bind(self.socketpath)

    # Install handler
    self.io_loop.add_handler(self.socket.fileno(), self._handle_read,
                             self.io_loop.ERROR | self.io_loop.READ)

    msg = json_encode({"msg": "attach", "primary": primary})
    self.open = True
    self.send_message(msg)
def tempnam():
    """Return a fresh temporary file path whose basename starts with 'tess_'.

    The original implementation called os.tempnam() and hid its security
    RuntimeWarning by temporarily swapping sys.stderr. tempfile.mkstemp()
    creates the file atomically (O_EXCL), which removes both the warning
    and the symlink race it warns about. The file now exists (empty) when
    this returns; callers that only need the name can overwrite it.
    """
    fd, path = tempfile.mkstemp(prefix='tess_')
    # The caller only needs the path; release our descriptor immediately.
    os.close(fd)
    return path
def pdf_merge(pdf1, pdf2):
    """Concatenate two PDF documents using the external pdftk tool.

    :param pdf1: raw bytes of the first PDF
    :param pdf2: raw bytes of the second PDF
    :returns: raw bytes of the merged PDF
    :raises Exception: if staging files or pdftk fail; the original error
        text is preserved in the message.
    """
    tmp_paths = []

    def _mktemp():
        # tempfile.mkstemp creates the file atomically (O_EXCL), avoiding
        # the symlink race inherent in os.tempnam().
        fd, path = tempfile.mkstemp(suffix=".pdf")
        os.close(fd)
        tmp_paths.append(path)
        return path

    tmp1 = _mktemp()
    tmp2 = _mktemp()
    tmp3 = _mktemp()
    try:
        # PDFs are binary; write in "wb" (the original used text mode).
        with open(tmp1, "wb") as f:
            f.write(pdf1)
        with open(tmp2, "wb") as f:
            f.write(pdf2)
        # Argument-list invocation (shell=False) instead of an os.system
        # string, so the paths are never shell-interpreted, and a nonzero
        # exit status raises instead of being silently ignored.
        subprocess.check_call(
            ["/usr/bin/pdftk", tmp1, tmp2, "cat", "output", tmp3])
        with open(tmp3, "rb") as f:
            return f.read()
    except Exception as e:
        # Keep the underlying cause instead of discarding it, and never
        # swallow KeyboardInterrupt/SystemExit (no bare except).
        raise Exception("Failed to merge PDF files: %s" % e)
    finally:
        # Temp files are removed on success *and* failure (the original
        # leaked all three on any error).
        for path in tmp_paths:
            try:
                os.unlink(path)
            except OSError:
                pass
def setUp(self):
    """Silence stderr, pick a scratch directory, and shrink the comp-bin
    size so tests exercise binning with small data."""
    # Redirect stderr to /dev/null (restored elsewhere via self.stderr);
    # this also hides os.tempnam's security RuntimeWarning below.
    self.stderr = sys.stderr
    sys.stderr = open('/dev/null', 'w')
    # dirname(tempnam()) yields the system temp directory.
    self.path = os.path.dirname(os.tempnam())
    # Save the global so tearDown can restore it — TODO confirm tearDown
    # reads self.prevCompBinSize.
    self.prevCompBinSize = gtrackcore.util.CompBinManager.COMP_BIN_SIZE
    gtrackcore.util.CompBinManager.COMP_BIN_SIZE = 100
def _bootpartOption(match):
    """Handle a bootpart=UUID option: record the boot UUID and restore the
    previous grub.conf on that partition.

    :param match: regex match whose group(1) is the partition UUID
    :returns: an empty list (no further options produced)
    """
    uuid = match.group(1)
    if not util.uuidToDevicePath(uuid):
        failWithLog("error: cannot find device for UUID: %s\n" % uuid)

    userchoices.setBootUUID(uuid)

    mountPath = util.mountByUuid(uuid)
    if not mountPath:
        failWithLog("error: cannot mount boot partition with UUID -- %s" %
                    uuid)

    restoredGrubConf = False
    # grub.conf may live under either layout depending on distro/version.
    for prefix in ("boot/grub", "grub"):
        path = os.path.join(mountPath, prefix, "grub.conf")
        if os.path.exists(path):
            # Stage a symlink to the saved config next to the target...
            tmpPath = os.tempnam(os.path.dirname(path), "grub.conf")
            os.symlink(os.path.basename(GRUB_CONF_PREV), tmpPath)
            # Use rename so the replacement is atomic.
            os.rename(tmpPath, path)
            restoredGrubConf = True
            break

    if not restoredGrubConf:
        log.warn("could not restore %s, upgrade failure will not "
                 "reboot into ESX v3" % GRUB_CONF_PREV)

    util.umount(mountPath)

    return []
def do_graph(graph, prog=None, format=None, target=None, type=None,
             string=None, options=None):
    """do_graph(graph, prog=conf.prog.dot, format="svg",
    target="| conf.prog.display", options=None, [string=1]):
    string: if not None, simply return the graph string
    graph: GraphViz graph description
    format: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option
    target: filename or redirect. Defaults pipe to Imagemagick's display program
    prog: which graphviz program to use
    options: options to be passed to prog"""
    if format is None:
        if WINDOWS:
            format = "png"  # use common format to make sure a viewer is installed
        else:
            format = "svg"
    if string:
        return graph
    # Legacy alias: 'type' overrides 'format' when given.
    if type is not None:
        format = type
    if prog is None:
        prog = conf.prog.dot
    start_viewer = False
    if target is None:
        if WINDOWS:
            # No display pipe on Windows: render to a temp file and launch
            # a viewer afterwards. NB: this shadows the stdlib 'tempfile'
            # module name locally.
            tempfile = os.tempnam("", "scapy") + "." + format
            target = "> %s" % tempfile
            start_viewer = True
        else:
            target = "| %s" % conf.prog.display
    if format is not None:
        format = "-T %s" % format
    # Feed the graph source to dot via a shell pipeline.
    w, r = os.popen2("%s %s %s %s" % (prog, options or "", format or "",
                                      target))
    w.write(graph)
    try:
        w.close()
    except IOError:
        pass
    if start_viewer:
        # Workaround for file not found error: We wait until tempfile is written.
        waiting_start = time.time()
        while not os.path.exists(tempfile):
            time.sleep(0.1)
            if time.time() - waiting_start > 3:
                warning("Temporary file '%s' could not be written. Graphic will not be displayed." % tempfile)
                break
        else:
            # while/else: only launch the viewer if we never hit 'break'.
            if conf.prog.display == conf.prog._default:
                os.startfile(tempfile)
            else:
                with ContextManagerSubprocess("do_graph()"):
                    subprocess.Popen([conf.prog.display, tempfile])
def login(username, password):
    """Log in to the HUST curriculum site with Selenium, solving the image
    CAPTCHA via ImageMagick ``convert`` + ``tesseract`` OCR.

    :returns: the webdriver instance (already quit if login bounced to
        Main_index.jsp). NOTE: the bare ``finally: return wb`` deliberately
        swallows any exception raised inside the try block.
    """
    wb = webdriver.Chrome()
    try:
        wb.delete_all_cookies()
        wb.get("http://curriculum.hust.edu.cn/")
        wb.find_element_by_id("loginId").send_keys(username)
        wb.find_element_by_id("upassword").send_keys(password)
        # BUG FIX: the original bound this to "c*k", which is not a valid
        # Python identifier (SyntaxError).
        cookies = wb.get_cookies()
        rc = {}
        for cookie in cookies:
            rc[cookie[u'name']] = cookie[u'value']
        # mkstemp creates the scratch path atomically; os.tempnam is racy
        # and removed in Python 3.
        fd, prefix = tempfile.mkstemp()
        os.close(fd)
        # Fetch the CAPTCHA image with the session cookies.
        img = requests.get(
            "http://curriculum.hust.edu.cn/imageensureAction.do",
            cookies=rc, stream=True).raw.read()
        with open(prefix + '.jpg', 'wb') as f:
            f.write(img)
        os.system("convert " + prefix + ".jpg " + prefix + ".pbm")
        os.system("tesseract " + prefix + ".pbm " + prefix + ".ans")
        with open(prefix + ".ans.txt", 'rb') as f:
            code = f.readline().strip()
        os.system("rm -rf " + prefix + "*")
        wb.find_element_by_id("randnumber").send_keys(code)
        wb.find_element_by_id("login_").click()
        # Poll until we land on the student page, or quit on bounce-back.
        while True:
            if 'student_index.jsp' in wb.current_url:
                break
            if 'Main_index.jsp' in wb.current_url:
                wb.quit()
            time.sleep(1)
    finally:
        return wb
def setUp(self): self.path1 = os.tempnam( self.dir, 'py-mactypes-test.' ) # tempnam raises a security warning re. security; it's part of the test code, not mactypes, so ignore it file(self.path1, 'w').close() fname = os.path.split(self.path1)[1] self.path2 = os.path.join(self.dir, 'moved-' + fname)
def set_externals_for_path_expect_error(path, val):
    """Attempt to set svn:externals on *path* to *val*, expecting the svn
    client to reject it (any stderr output satisfies the expectation)."""
    # propset -F reads the property value from a file, so stage it first.
    scratch_file = os.tempnam()
    svntest.main.file_append(scratch_file, val)
    expected_stderr = svntest.verify.AnyOutput
    svntest.actions.run_and_verify_svn(None, None, expected_stderr,
                                       'pset', '-F', scratch_file,
                                       'svn:externals', path)
    os.remove(scratch_file)
def login(username, password):
    """Log in to the HUST curriculum site with Selenium, solving the image
    CAPTCHA via ImageMagick ``convert`` + ``tesseract`` OCR.

    :returns: the webdriver instance (already quit if login bounced to
        Main_index.jsp). NOTE: the bare ``finally: return wb`` deliberately
        swallows any exception raised inside the try block.
    """
    wb = webdriver.Chrome()
    try:
        wb.delete_all_cookies()
        wb.get("http://curriculum.hust.edu.cn/")
        wb.find_element_by_id("loginId").send_keys(username)
        wb.find_element_by_id("upassword").send_keys(password)
        # BUG FIX: the original bound this to "c*k", which is not a valid
        # Python identifier (SyntaxError).
        cookies = wb.get_cookies()
        rc = {}
        for cookie in cookies:
            rc[cookie[u'name']] = cookie[u'value']
        # mkstemp creates the scratch path atomically; os.tempnam is racy
        # and removed in Python 3.
        fd, prefix = tempfile.mkstemp()
        os.close(fd)
        # Fetch the CAPTCHA image with the session cookies.
        img = requests.get(
            "http://curriculum.hust.edu.cn/imageensureAction.do",
            cookies=rc, stream=True).raw.read()
        with open(prefix + '.jpg', 'wb') as f:
            f.write(img)
        os.system("convert " + prefix + ".jpg " + prefix + ".pbm")
        os.system("tesseract " + prefix + ".pbm " + prefix + ".ans")
        with open(prefix + ".ans.txt", 'rb') as f:
            code = f.readline().strip()
        os.system("rm -rf " + prefix + "*")
        wb.find_element_by_id("randnumber").send_keys(code)
        wb.find_element_by_id("login_").click()
        # Poll until we land on the student page, or quit on bounce-back.
        while True:
            if 'student_index.jsp' in wb.current_url:
                break
            if 'Main_index.jsp' in wb.current_url:
                wb.quit()
            time.sleep(1)
    finally:
        return wb
def tempnam_no_warning(*args):
    """
    An os.tempnam with the warning turned off, because sometimes you just
    need to use this and don't care about the stupid security warning.

    :param args: forwarded verbatim to os.tempnam (dir, prefix).
    :returns: the temporary path name from os.tempnam.
    """
    # BUG FIX: despite its name and docstring, the original simply called
    # os.tempnam and never suppressed anything. Filter the RuntimeWarning
    # inside catch_warnings so the global filter state is untouched.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", RuntimeWarning)
        return os.tempnam(*args)
def test_run_seqence_add_path(self):
    """Run seq_add_path_1.xml with merged parameters and verify all 10
    cases both begin and pass, by scanning the log file."""
    log_file = os.tempnam(None, "stasis_")
    case_lib = stasis.TestCaseLibrary(self.cv)
    params = {
        "zlmserver": "zlmserver.novell.com",
        "key": "my_key",
        "var_zlmserver": "var_zlmserver.novell.com"
    }
    # Command-line arguments override/extend the defaults above.
    for key in self.arg_dict.iterkeys():
        params[key] = self.arg_dict[key]
    sequence_lib = stasis.TestSequenceLibrary(self.cv, self.sv, case_lib)
    sequence_lib.addConfFile(
        os.path.expanduser("~/stasis/unittest/add_path.conf"))
    sequence = sequence_lib.load("seq_add_path_1.xml")
    log_reporter = stasis.LogTestCaseReporter(3, logfile=log_file)
    sequence.addReporter(log_reporter)
    runner = stasis.Runner(sequence, params)
    runner.run(wait_before_kill=3)
    # Tally BEGIN/PASS markers from the log.
    begin_count = 0
    pass_count = 0
    total = 10
    fd = open(log_file, 'r')
    for line in fd.readlines():
        if re.search(" BEGIN ", line):
            begin_count += 1
        if re.search(" PASS ", line):
            pass_count += 1
    fd.close()
    self.assertEqual(pass_count, total)
    self.assertEqual(begin_count, total)
def test_5_save_host_keys(self):
    """
    verify that SSHClient correctly saves a known_hosts file.
    """
    # os.tempnam emits a security RuntimeWarning; irrelevant for a test.
    warnings.filterwarnings('ignore', 'tempnam.*')

    host_key = paramiko.RSAKey.from_private_key_file('tests/test_rsa.key')
    public_host_key = paramiko.RSAKey(data=str(host_key))

    localname = os.tempnam()
    client = paramiko.SSHClient()
    self.assertEquals(0, len(client.get_host_keys()))

    # known_hosts uses the "[host]:port" form for non-default ports.
    host_id = '[%s]:%d' % (self.addr, self.port)

    client.get_host_keys().add(host_id, 'ssh-rsa', public_host_key)
    self.assertEquals(1, len(client.get_host_keys()))
    self.assertEquals(public_host_key,
                      client.get_host_keys()[host_id]['ssh-rsa'])

    # Save and confirm the entry made it to disk verbatim.
    client.save_host_keys(localname)
    with open(localname) as fd:
        assert host_id in fd.read()
    os.unlink(localname)
def main():
    """Entry point: parse arguments with docopt and normalize filenames to
    NFC, either locally or on a remote host via scp/ssh.

    NOTE: the usage string below is *parsed by docopt at runtime* — do not
    edit it cosmetically.
    """
    arguments = docopt(
        """This tool can convert file system encoded in NFD mode to NFC mode.

On Mac OS X, the filesystem encode file name in NFD unicode
(http://en.wikipedia.org/wiki/Unicode_normalization#Example). On GNU/Linux,
by default the filesystem encode file name in NFC unicode but it can also
contain file encoded in NFD mode.

You can use this tool to convert NFD to NFC.

Usage:
    nfd2nfc <path>
    nfd2nfc <url>
    nfd2nfc -h | --help | --version

Examples :

    Convert on local file system (use it on GNU/Linux, not on Mac OS X) :

        $ nfd2nfc /home/username/myproject/

    You can use ssh url syntax to fix unicode on remote host :

        $ nfd2nfc ssh://[email protected]:2000:/home/foobar/www/

Home page : https://bitbucket.org/harobed/convert-nfd-unicode-filesystem-to-nfc-unicode

Contact : [email protected]
""", version=__version__)

    path = arguments['<path>']
    if path.startswith('ssh://'):
        # Remote mode: ship a small fixer script to the host and run it.
        url = urlparse(path)
        assert url.scheme == 'ssh'
        host = url.netloc
        port = 22
        user = os.getlogin()
        if ':' in host:
            host, port = host.split(':')
        if '@' in host:
            user, host = host.split('@')

        # Stage the remote command in a local temp file for scp.
        path_tmpfile = os.tempnam()
        basename_tmpfile = os.path.basename(path_tmpfile)  # NOQA
        f = open(path_tmpfile, 'w')
        f.write(remote_cmd % {'path': url.path})
        f.close()

        # Copy, execute, then remove the script on the remote side.
        os.system(
            'scp -P %(port)s %(path_tmpfile)s %(user)s@%(host)s:/tmp/'
            % locals())
        os.system(
            'ssh -p %(port)s %(user)s@%(host)s python /tmp/%(basename_tmpfile)s'
            % locals())
        os.system(
            'ssh -p %(port)s %(user)s@%(host)s rm /tmp/%(basename_tmpfile)s'
            % locals())
    else:
        if not os.path.exists(path):
            sys.exit('Error, folder not found %s' % path)

        fix_unicode(path)
def __init__(self, path, customers_dir=None, dev=0, verbose=1,
             ignore_files=None, log_file_name='', include_source=0,
             ignore_import_errors=0, nodefs_file='',
             include_customer_source=0):
    """Configure a Broadway package build: record options, derive build
    paths, and compute the output .md5/.tgz filenames from the product
    version plus any customer directories.

    :param path: build source path
    :param customers_dir: customer overlay directories folded into the
        output filename (slashes become underscores)
    :param dev: non-zero builds "-dev" flavored artifacts
    :param verbose: when falsy, stdout/stderr are closed entirely
    """
    if customers_dir is None:
        customers_dir = []
    if ignore_files is None:
        ignore_files = []
    self.nodedefs_url = 'http://envenergy.com/mpx_tools/node_def_builder.jsp'
    tgz_filename = ''
    md5_filename = ''
    self.nodefs_file = nodefs_file
    self.ignore_import_errors = ignore_import_errors
    self.include_source = include_source
    self.include_customer_source = include_customer_source
    self.log_file_name = log_file_name
    self.log_file = None
    self.ignore_files = ignore_files
    self.errors = []
    self.warnings = []
    self.cwd = os.getcwd()
    self.verbose = verbose
    if not self.verbose:
        # Silence the build entirely; note these streams are closed, not
        # redirected, for the remainder of the process.
        sys.stdout.close()
        sys.stderr.close()
    self.path = path
    self.customers_dir = customers_dir
    self.dev = dev
    self.tar_file_list = []
    # Scratch file (in the build path) that will hold the tar file list.
    self.tar_file_list_name = os.tempnam(self.path)
    self.product = 'broadway'
    self.build_dir = os.path.join(self.cwd, '.build', self.product)
    self.root = ''
    self._version = ''
    self.nodedefs_md5_file = os.path.join(self.build_dir, self.product,
                                          'nodedefs.md5')
    #figure out the md5 file name and tgz filename
    filename = str(self.product) + '-' + str(self.version())
    for customer in customers_dir:
        # Flatten path separators so the customer dir can live in a
        # filename component.
        tmp_str = string.replace(str(customer), '/', '_')
        filename += '+' + string.replace(str(tmp_str), '\\', '_')
    if self.dev:
        md5_filename = filename + '-dev.md5'
        tgz_filename = filename + '-dev.tgz'
    else:
        md5_filename = filename + '.md5'
        tgz_filename = filename + '.tgz'
    self.md5_file = os.path.join(str(self.path), str(self.build_dir),
                                 str(self.product), md5_filename)
    self.tgzfile = os.path.join(self.cwd, tgz_filename)
    self.iscustomer = 0
def test_hdf5_store(self): filename = os.tempnam() + ".h5" # Create a store store = HDF5Store(filename, read_only=False) # Create a new id_ id_ = store.get_id() # Test store annotation assert store.store_annotations(id_, foo="bar", foo2="bar2") annotations = store.get_annotations(id_) assert (annotations["foo"] == "bar") and \ (annotations["foo2"] == "bar2") # Test store metadata assert store.store_metadata(id_, type=SoundTransform, parents=["parent1"]) metadata = store.get_metadata(id_) assert (metadata["type"] == SoundTransform) and \ (metadata["parents"] == ["parent1"]) # Test store data assert store.store_data(id_, np.zeros((500, 2))) data = store.get_data(id_) assert np.all(data == 0) and data.shape == (500, 2)
def deleteImage(self, sdUUID, imgUUID, volsImgs):
    """Delete an image directory and its volumes from the storage domain.

    The image dir is first renamed to a "removed" temp name so the delete
    is not observable half-done under the original path; then each volume
    (plus its .meta and .lease files) is removed, and finally the dir.

    :raises se.ImageDeleteError: if the initial rename or the final rmdir
        fails. Per-volume removal failures are only logged.
    """
    currImgDir = self.getImagePath(imgUUID)
    dirName, baseName = os.path.split(currImgDir)
    toDelDir = os.tempnam(dirName, sd.REMOVED_IMAGE_PREFIX + baseName)
    self.log.debug("Renaming dir %s to %s", currImgDir, toDelDir)
    try:
        self.oop.os.rename(currImgDir, toDelDir)
    except OSError as e:
        self.log.error("image: %s can't be moved", currImgDir)
        raise se.ImageDeleteError("%s %s" % (imgUUID, str(e)))
    for volUUID in volsImgs:
        volPath = os.path.join(toDelDir, volUUID)
        try:
            self.log.debug("Removing file: %s", volPath)
            self.oop.os.remove(volPath)
            metaFile = volPath + '.meta'
            self.log.debug("Removing file: %s", metaFile)
            self.oop.os.remove(metaFile)
            leaseFile = volPath + '.lease'
            self.log.debug("Removing file: %s", leaseFile)
            self.oop.os.remove(leaseFile)
        except OSError:
            # Best effort: keep deleting the rest of the volumes.
            self.log.error("vol: %s can't be removed.", volPath,
                           exc_info=True)
    self.log.debug("Removing directory: %s", toDelDir)
    try:
        self.oop.os.rmdir(toDelDir)
    except OSError as e:
        self.log.error("removed image dir: %s can't be removed", toDelDir)
        raise se.ImageDeleteError("%s %s" % (imgUUID, str(e)))
def _get_preview_filename(self):
    """Return a path for the generated preview .py file, or None on error.

    Python 2 uses os.tempnam; Python 3 places a randomized, non-clashing
    name next to the saved project file (and requires the project to have
    been saved first).
    """
    import warnings
    # os.tempnam warns about insecurity; not relevant for a preview file.
    warnings.filterwarnings("ignore", "tempnam", RuntimeWarning,
                            "application")
    if compat.PYTHON2:
        out_name = os.tempnam(None, 'wxg') + '.py'
    else:
        # create a temporary file at either the output path or the project path
        error = None
        if not self.filename:
            error = "Save project first; a temporary file will be created in the same directory."
        else:
            dirname, basename = os.path.split(self.filename)
            basename, extension = os.path.splitext(basename)
            if not os.path.exists(dirname):
                error = "Directory '%s' not found" % dirname
            elif not os.path.isdir(dirname):
                error = "'%s' is not a directory" % dirname
        if error:
            misc.error_message(error)
            return None
        # Retry random names until one does not collide.
        while True:
            out_name = os.path.join(
                dirname,
                "_%s_%d.py" % (basename, random.randrange(10**8, 10**9)))
            if not os.path.exists(out_name):
                break
    return out_name
def handle_track(self, track, options):
    """Decode one track to a temp WAVE file, encode it to WMA via
    WMCmd.vbs with the track's tag metadata, then delete the temp file.

    :param track: mapping with 'input_cmdline', 'output', output options
        and optional tag fields (TITLE, ARTIST, ...)
    :param options: provides 'tempdir' for the scratch WAVE file
    """
    # Generate a name for a temporary WAVE file for this track.
    tmpfn = os.tempnam(options.tempdir, "track") + ".wav"

    # Send the audio data of this track to the temporay file.
    args = []
    args.append(track["input_cmdline"])
    args.append(">")
    args.append(qstr(tmpfn))
    cmdline = args_to_string(args)
    self.console.execute(cmdline)

    # Encode the temporary file by using WMCmd.vbs.
    # optstr() presumably omits the flag when the tag value is missing —
    # TODO confirm against its definition.
    args = []
    args.append(self.cmd)
    args.append(optstr("-title", track.get("TITLE")))
    args.append(optstr("-author", track.get("ARTIST")))
    args.append(optstr("-album", track.get("ALBUM")))
    args.append(optstr("-trackno", track.get("TRACKNUMBER")))
    args.append(optstr("-genre", track.get("GENRE")))
    args.append(optstr("-year", track.get("DATE")))
    args.append(optstr("-copyright", track.get("COPYRIGHT")))
    args.append("-audioonly")
    args.append(track.get("output_option"))
    args.append(track.get("output_option_tag"))
    args.append(optstr("-input", tmpfn))
    args.append(optstr("-output", track["output"]))
    cmdline = args_to_string(args)
    self.console.execute(cmdline)

    # Remove the temporary file.
    os.remove(tmpfn)
def test_N_put_without_confirm(self):
    """
    verify that get/put work without confirmation.
    """
    # os.tempnam emits a security RuntimeWarning; irrelevant for a test.
    warnings.filterwarnings('ignore', 'tempnam.*')

    localname = os.tempnam()
    text = 'All I wanted was a plastic bunny rabbit.\n'
    f = open(localname, 'wb')
    f.write(text)
    f.close()
    saved_progress = []

    def progress_callback(x, y):
        # Record every (bytes transferred, total bytes) invocation.
        saved_progress.append((x, y))
    # confirm=False: put() skips the post-transfer stat, so the returned
    # attributes are expected to be empty.
    res = sftp.put(localname, FOLDER + '/bunny.txt', progress_callback,
                   False)

    self.assertEquals(SFTPAttributes().attr, res.attr)

    f = sftp.open(FOLDER + '/bunny.txt', 'r')
    self.assertEquals(text, f.read(128))
    f.close()
    # Final callback must report the full 41-byte payload transferred.
    self.assertEquals((41, 41), saved_progress[-1])

    os.unlink(localname)
    sftp.unlink(FOLDER + '/bunny.txt')
def __init__(self, arch=None, sflib_root=".", CC="gcc", OBJDUMP="objdump",
             CFLAGS="", LDFLAGS="", tmpdir="/tmp", keep_tmp=0):
    """Configure the toolchain (compiler/objdump flags, sflib includes)
    for one target architecture.

    :param arch: target name; defaults to this module's name with any
        leading "arch_" stripped
    :param sflib_root: directory containing sflib headers per arch
    :param tmpdir: where the generated ld script (if any) is written
    :param keep_tmp: non-zero keeps temp artifacts for debugging
    """
    if arch is None:
        arch = self.__module__
        if arch.startswith("arch_"):
            arch = arch.split("_", 1)[1]
    self.arch = arch
    self.CC = CC
    self.OBJDUMP = OBJDUMP
    # self.CFLAGS = "-O3 -S -fPIC -Winline -finline-functions -ffreestanding -fomit-frame-pointer -fno-zero-initialized-in-bss "+CFLAGS
    self.CFLAGS = "-O3 -S -fPIC -Winline -finline-functions -ffreestanding -fomit-frame-pointer " + CFLAGS
    # sflib.h is force-included into every compilation unit.
    self.INCLUDES = "-isystem '%s' -isystem '%s'" % (
        sflib_root, "%s/%s" % (sflib_root, arch))
    self.INCLUDES += " -include %s/%s/sflib.h" % (sflib_root, arch)
    self.LDFLAGS = LDFLAGS
    self.ASSEMBLEFLAGS = "-c"
    self.DISFLAGS = ""
    self.TEST_CFLAGS = " -static "
    # By convention the arch name is "<os>_<cpu>"; keep the CPU part.
    self.CPU = arch.split("_")[1]
    self.keep_tmp = keep_tmp
    # self.ldscript is presumably a class attribute on the concrete arch
    # subclass — TODO confirm.
    if self.ldscript is not None:
        self.ldscript_file = os.tempnam(tmpdir, "ldscript-")
        open(self.ldscript_file, "w").write(self.ldscript)
        sf.sflog.info("ld script is %s" % self.ldscript_file)
        self.LDFLAGS += " -Wl,-T,%s" % self.ldscript_file
    else:
        self.ldscript_file = None
        sf.sflog.info("no ld script")
def __WritePickled(self, obj, filename, openfile=open):
    """Pickle *obj* to a temporary file, then atomically rename it to
    *filename*.

    Writing to a temp file in the destination directory first means a
    crash mid-write can never leave a truncated pickle at *filename*,
    and the same-filesystem rename stays atomic.
    """
    if not filename or filename == '/dev/null' or not obj:
        return

    import tempfile
    # mkstemp creates the temp file securely; os.tempnam() only
    # returned a name and was open to a symlink race.  The default for
    # openfile is now the open() builtin (the py2-only `file` alias
    # behaved identically).
    fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(filename))
    os.close(fd)
    tmpfile = openfile(tmpname, 'wb')
    pickler = pickle.Pickler(tmpfile, protocol=1)
    pickler.fast = True
    pickler.dump(obj)
    tmpfile.close()

    self.__file_lock.acquire()
    try:
        try:
            os.rename(tmpname, filename)
        except OSError:
            # On Windows, rename fails when the target exists: remove
            # the target (best effort) and retry the rename.
            try:
                os.remove(filename)
            except OSError:
                pass
            os.rename(tmpname, filename)
    finally:
        self.__file_lock.release()
def test_copy_image_file_ftp_failed(self):
    """_copy_image_file must re-raise the FTP failure after removing
    the partial remote file with 'rm -f'."""
    import tempfile
    # The path is only used as a string by the mocked calls; mkstemp
    # replaces the insecure os.tempnam() and the file is removed again
    # on test cleanup.
    fd, file_path = tempfile.mkstemp(prefix='image', dir='/tmp')
    os.close(fd)
    self.addCleanup(os.unlink, file_path)
    remote_path = '/mnt/openstack/images'
    exp_remote_path = os.path.join(remote_path,
                                   os.path.basename(file_path))
    exp_cmd = ' '.join(['/usr/bin/rm -f', exp_remote_path])

    fake_noop = lambda *args, **kwargs: None
    fake_op = self.powervm_adapter
    self.stubs.Set(fake_op, 'run_vios_command', fake_noop)
    self.stubs.Set(fake_op, '_checksum_local_file', fake_noop)

    self.mox.StubOutWithMock(common, 'ftp_put_command')
    self.mox.StubOutWithMock(self.powervm_adapter,
                             'run_vios_command_as_root')

    msg_args = {'ftp_cmd': 'PUT',
                'source_path': file_path,
                'dest_path': remote_path}
    exp_exception = exception.PowerVMFTPTransferFailed(**msg_args)

    # The FTP put raises; the adapter must then issue the remote rm.
    common.ftp_put_command(self.connection, file_path,
                           remote_path).AndRaise(exp_exception)
    self.powervm_adapter.run_vios_command_as_root(exp_cmd).AndReturn([])

    self.mox.ReplayAll()

    self.assertRaises(exception.PowerVMFTPTransferFailed,
                      self.powervm_adapter._copy_image_file,
                      file_path, remote_path)
def handle_track(self, track, options):
    """Transcode one track: dump its audio to a temporary WAVE file,
    encode it with WMCmd.vbs, then delete the temporary file.
    """
    import tempfile
    # mkstemp creates the placeholder atomically; os.tempnam() only
    # returned a name (symlink race, removed in Python 3).  The dump
    # command just overwrites the empty file.
    fd, tmpfn = tempfile.mkstemp(suffix='.wav', prefix='track',
                                 dir=options.tempdir)
    os.close(fd)
    try:
        # Send the audio data of this track to the temporary file.
        args = [track['input_cmdline'], '>', qstr(tmpfn)]
        self.console.execute(args_to_string(args))

        # Encode the temporary file by using WMCmd.vbs.
        args = [
            self.cmd,
            optstr('-title', track.get('TITLE')),
            optstr('-author', track.get('ARTIST')),
            optstr('-album', track.get('ALBUM')),
            optstr('-trackno', track.get('TRACKNUMBER')),
            optstr('-genre', track.get('GENRE')),
            optstr('-year', track.get('DATE')),
            optstr('-copyright', track.get('COPYRIGHT')),
            '-audioonly',
            track.get('output_option'),
            track.get('output_option_tag'),
            optstr('-input', tmpfn),
            optstr('-output', track['output']),
        ]
        self.console.execute(args_to_string(args))
    finally:
        # Remove the temporary file even if a console command failed.
        os.remove(tmpfn)
def test_staf_copy_tc(self):
    """Round-trip a pickled test case through STAF FS COPY and a
    PROCESS RETURNFILE, checking the copy deserializes identically."""
    out = os.popen(self.stafproc_cmd)
    time.sleep(2)
    staf_handle = staf.STAFHandle("test_copy_tc")
    # mkdtemp creates a unique private directory atomically;
    # os.tempnam(None, ...) + makedirs had a name/create race.
    tmp_dir = tempfile.mkdtemp(prefix="pckl-")
    tc1 = self.case_lib.load("case_pass_2.xml")
    (pickle_fd, pickle_file) = tempfile.mkstemp(".pckl")
    os.close(pickle_fd)
    pckl_file_name = os.path.basename(pickle_file)
    with open(pickle_file, 'w') as pickle_fp:
        cPickle.dump(tc1, pickle_fp)
    result1 = staf_handle.submit('local', "FS", \
        'COPY FILE %s TODIRECTORY %s TOMACHINE local' % \
        (pickle_file, tmp_dir))
    self.assertEqual(result1.rc, staf.STAFResult.Ok)
    result2 = staf_handle.submit('local', "PROCESS", \
        'START SHELL COMMAND "echo HI > %s" WAIT 6000 RETURNFILE %s' %\
        (os.path.join(tmp_dir, "echo.txt"), \
         os.path.join(tmp_dir, pckl_file_name)))
    un = staf.unmarshall(result2.result)
    tc2 = cPickle.loads(un.getRootObject()['fileList'][0]['data'])
    self.assertEqual(tc1.getName(), tc2.getName())
    result = staf_handle.submit('local', "shutdown", "shutdown")
    time.sleep(2)
    # Clean up every file we created, then the directory itself.
    os.unlink(pickle_file)
    os.unlink(os.path.join(tmp_dir, "echo.txt"))
    os.unlink(os.path.join(tmp_dir, pckl_file_name))
    os.rmdir(tmp_dir)
def make(reads,barcodes,saveAs,name=False,mismatch=0,report=False):
    """Tag each read in *reads* with its best-matching barcode (from
    *barcodes*) and write the renamed reads to *saveAs*; optionally
    write a per-barcode count report next to it.
    """
    import os
    import tempfile
    # fastools does not support a pipe, so matches go through a real
    # temporary file; mkstemp creates it atomically (tempnam was racy).
    fd, temp = tempfile.mkstemp(dir=environ['HOME'])
    os.close(fd)
    args = [config.get('paths','fastools'), reads, temp, mismatch]
    calls = {seq.id: [] for seq in SeqIO.parse(reads, 'fasta')}
    counts = defaultdict(lambda: defaultdict(int))
    for barcode in SeqIO.parse(barcodes, 'fasta'):
        # Fastools checks the left end of each sequence for a match.
        call(cmd.format(*args + [-len(barcode), barcode.seq]).split())
        for seq in SeqIO.parse(temp, 'fasta'):
            calls[seq.id] += [barcode]
    remove(temp)
    with open(saveAs, 'w') as handle:
        for seq in SeqIO.parse(reads, 'fasta'):
            if calls[seq.id]:
                # Only use the best barcode (fewest mismatches,
                # earliest alignment).
                barcode = sorted(calls[seq.id], key=lambda x: sum([
                    seq.seq.find(j, i) - i
                    for i, j in enumerate(x)]) - len(x))[0]
                seq.id += '|{0}'.format((barcode.seq, barcode.id)[name])
                counts[barcode][str(seq.seq[:len(barcode) + 1])] += 1
            SeqIO.write(seq, handle, 'fasta')
    if report:
        with open(path.splitext(saveAs)[0] + '.report.txt', 'w') as handle:
            handle.write('\n\n'.join(['\n'.join(
                ['{0}: {1}'.format(k.id, k.seq)] +
                ['{0} x {1}'.format(i, j) for i, j in sorted(v.items())])
                for k, v in counts.items()]))
def test_bad_file(self):
    '''Verify that exits with failure when source menu.lst can't be read'''
    import shutil
    import tempfile
    # A file inside a freshly created private directory is guaranteed
    # not to exist; os.tempnam() merely guessed an unused name (racy,
    # and removed in Python 3).
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, tmp_dir)
    noexist = os.path.join(tmp_dir, 'no_such_menu.lst')
    self.special_grub_entry = "SPECIAL GRUB ENTRY"
    self.ict.grubmenu = noexist
    result = self.ict.fix_grub_entry()
    self.assertEqual(result, ict_mod.ICT_FIX_GRUB_ENTRY_FAILED)
def sendpfast(x, pps=None, mbps=None, realtime=None, loop=0, iface=None):
    """Send packets at layer 2 using tcpreplay for performance
    pps: packets per second
    mpbs: MBits per second
    realtime: use packet's timestamp, bending time with realtime value
    loop: number of times to process the packet list
    iface: output interface
    """
    if iface is None:
        iface = conf.iface
    argv = [conf.prog.tcpreplay, "--intf1=%s" % iface]
    if pps is not None:
        argv.append("--pps=%i" % pps)
    elif mbps is not None:
        argv.append("--mbps=%i" % mbps)
    elif realtime is not None:
        argv.append("--multiplier=%i" % realtime)
    else:
        argv.append("--topspeed")
    if loop:
        argv.append("--loop=%i" % loop)

    import tempfile
    # mkstemp creates the capture file securely.  The old
    # os.tempnam("scapy") call misused its first argument (a
    # *directory*, not a prefix), was race-prone, and the file was
    # never deleted afterwards.
    fd, pcap_path = tempfile.mkstemp(prefix="scapy", suffix=".pcap")
    os.close(fd)
    argv.append(pcap_path)
    try:
        wrpcap(pcap_path, x)
        try:
            subprocess.check_call(argv)
        except KeyboardInterrupt:
            log_interactive.info("Interrupted by user")
        except Exception as e:
            log_interactive.error(e)
    finally:
        # Always remove the temporary capture file.
        os.unlink(pcap_path)
def make_temp_file(string):
    """Write *string* (bytes) to a fresh temporary file and return the
    file's path.  The caller is responsible for deleting the file.
    """
    # mkstemp creates and opens the file atomically; the old
    # os.tempnam() + unlink + open sequence had a window in which an
    # attacker could plant a symlink at the predicted name.
    fd, fname = tempfile.mkstemp()
    with os.fdopen(fd, "wb") as fp:
        fp.write(string)
    return fname
def test_working_directory_not_creatable(self):
    """Assert that the get_working_directory() method raises an error
    if it cannot be created.
    """
    import tempfile
    read_only = stat.S_IRUSR + stat.S_IRGRP + stat.S_IROTH
    all_permissions = stat.S_IRWXU + stat.S_IRWXG + stat.S_IRWXO

    # mkdtemp atomically creates a unique directory; os.tempnam() +
    # makedirs had a race between choosing the name and creating it.
    temp_base = tempfile.mkdtemp()
    os.chmod(temp_base, read_only)
    temp_path = "%s/temp_file" % (temp_base,)

    # Point the config at a path under the unwritable directory.
    CONF.set_override('working_directory', temp_path)
    self.assertFalse(os.path.exists(CONF.working_directory))

    # Make sure it raises an exception.
    self.assertRaises(IOError, working_dir.get_working_directory)

    # Restore permissions so the tree can be removed, then clean up.
    os.chmod(temp_base, all_permissions)
    shutil.rmtree(temp_base)
    CONF.clear_override('working_directory')
    working_dir.WORKING_DIRECTORY = None
def format_string(fp, str):
    """Reflow a texinfo docstring through makeinfo and write the
    result to *fp*, indented by the module-level `indent`.

    <<name>> cross references become "See also pychart.name"; text
    inside @example blocks keeps its original whitespace.
    """
    import tempfile
    str = re.sub("<<([^>]+)>>", "See also pychart.\\1", str)
    str2 = ""
    in_example = 0
    for l in str.split("\n"):
        if re.match("@example", l):
            in_example = 1
        if re.match("@end example", l):
            in_example = 0
        if in_example:
            str2 += l
        else:
            # Outside examples, strip leading whitespace so makeinfo
            # can refill the paragraphs.
            l = re.sub("^[ \t]*", "", l)
            str2 += l
        str2 += "\n"
    # mkstemp creates the scratch file atomically (os.tempnam was
    # race-prone and is removed in Python 3).
    fd, fname = tempfile.mkstemp()
    try:
        with os.fdopen(fd, "w") as out_fp:
            out_fp.write(str2)
        in_fp = os.popen("makeinfo --fill-column=64 --no-headers " + fname, "r")
        for l in in_fp.readlines():
            fp.write(" " * indent)
            fp.write(l)
        in_fp.close()
    finally:
        # Remove the scratch file even if makeinfo fails.
        os.remove(fname)
def setUp(self):
    """setup method that starts up mongod instances using `self.mongo_options`"""
    # So any function that calls IOLoop.instance() gets the
    # PuritanicalIOLoop instead of a default loop.
    if not tornado.ioloop.IOLoop.initialized():
        self.loop = PuritanicalIOLoop()
        self.loop.install()
    else:
        self.loop = tornado.ioloop.IOLoop.instance()
    self.assert_(
        isinstance(self.loop, PuritanicalIOLoop),
        "Couldn't install IOLoop"
    )

    import tempfile
    self.temp_dirs = []
    self.mongods = []
    for options in self.mongod_options:
        # mkdtemp atomically creates a unique dbpath directory;
        # os.tempnam() + makedirs had a name/create race.
        dirname = tempfile.mkdtemp()
        self.temp_dirs.append(dirname)
        options = ['mongod', '--bind_ip', '127.0.0.1', '--oplogSize',
                   '10', '--dbpath', dirname, '--smallfiles', '-v',
                   '--nojournal'] + list(options)
        logging.debug(options)
        pipe = subprocess.Popen(options)
        self.mongods.append(pipe)
        logging.debug('started mongod %s' % pipe.pid)
    # Give each mongod time to come up before the tests hit it.
    sleep_time = 1 + (len(self.mongods) * 2)
    logging.info('waiting for mongod to start (sleeping %d seconds)'
                 % sleep_time)
    time.sleep(sleep_time)
def guestmount(what, where):
    """Mount disk image *what* at *where* via guestmount and return the
    path of guestmount's pid file.

    The pid file lives in a freshly created private temporary
    directory; the caller is responsible for removing the file and its
    directory when done.
    """
    import tempfile
    # A path inside a brand-new private directory is guaranteed not to
    # pre-exist, so the isfile() check below genuinely verifies that
    # guestmount wrote its pid file.  os.tempnam() only guessed an
    # unused name (racy, removed in Python 3).
    pid_dir = tempfile.mkdtemp(prefix='guestmount')
    pidfile = os.path.join(pid_dir, 'guestmount.pid')
    run_cmd(['sudo', 'guestmount', '--pid-file', pidfile,
             '-a', what, '-i', where], "guestmount")
    assert os.path.isfile(pidfile)
    logging.info('guestmount pid file %s', pidfile)
    return pidfile
def __init__(self, fileName, includertp=False):
    """
    If necessary converts a pcap file into a pdml file (via tethereal)
    and reads the pdml file into self.packets, then calls
    __processPackets for postprocessing.
    """
    self.PacketList = []
    self.endpoints = {}
    # 58253 changes
    self.pdmlFilepath = fileName
    self.log = logging.getLogger('nextestlog')
    # 45468 changes
    self.includertp = includertp
    self.tempFile = None
    pdmlFileName = fileName
    if fileName.endswith('.pcap'):
        import subprocess
        import tempfile
        # mkstemp creates the scratch file securely (os.tempnam was
        # racy), and running tethereal via an argv list with an open
        # stdout handle avoids interpolating an unquoted file name
        # into a shell command.
        fd, self.tempFile = tempfile.mkstemp(suffix='.pdml')
        with os.fdopen(fd, 'w') as out:
            rc = subprocess.call(['tethereal', '-Tpdml', '-r', fileName],
                                 stdout=out)
        if rc != 0:
            raise EnvironmentError('tethereal could not create %s file'
                                   % self.tempFile)
        pdmlFileName = self.tempFile
    fh = open(pdmlFileName)
    try:
        self.PacketList = []
        etherealXml.parse_fh(fh, self.pdmlCallback)
        self.packets = self.PacketList
        self.__processPackets()
    finally:
        # Close the handle and drop the scratch pdml even on error.
        fh.close()
        if self.tempFile is not None:
            os.remove(self.tempFile)
def execMp(self):
    """Validate the current layer's FGDC metadata with the mp tool and
    show the err2html-formatted report in the viewer dialog."""
    settings = QSettings("NextGIS", "metatools")
    if not settings.value("tools/hasFGDC", False):
        return

    # check if metadata exists
    if not self.checkMetadata():
        return

    # check matadata standard
    standard = MetaInfoStandard.tryDetermineStandard(self.metaProvider)
    if standard != MetaInfoStandard.FGDC:
        QMessageBox.critical(self.iface.mainWindow(),
                             QCoreApplication.translate("Metatools", "Metatools"),
                             QCoreApplication.translate("Metatools", "MP tool support only FGDC standard!")
                            )
        return

    # start tool
    mpFilePath = settings.value("tools/mp", "")
    errFilePath = settings.value("tools/err2html", "")

    import tempfile
    # mkstemp creates the error-output file securely; os.tempnam()
    # only returned a name and was open to a symlink race.  mp's "-e"
    # flag simply overwrites the empty file.
    fd, tempPath = tempfile.mkstemp()
    os.close(fd)

    temporaryMetafile = self.metaProvider.SaveToTempFile()
    result = ''
    try:
        import subprocess
        subprocess.check_call([mpFilePath, "-e", tempPath, temporaryMetafile],
                              shell=throwShell, cwd=toolPath)
        if sys.hexversion >= 34013184:
            result = subprocess.check_output([errFilePath, tempPath],
                                             shell=throwShell, cwd=toolPath)
        else:
            # workaround for python < 2.7 (no check_output); stdin
            # must be a pipe to dodge a Windows console-handle bug.
            err2htmlProc = subprocess.Popen([errFilePath, tempPath],
                                            shell=throwShell, cwd=toolPath,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT,
                                            stdin=subprocess.PIPE)
            err2htmlProc.stdin.close()
            result = err2htmlProc.communicate()[0]
    except:
        QMessageBox.critical(self.iface.mainWindow(),
                             QCoreApplication.translate("Metatools", "Metatools"),
                             QCoreApplication.translate("Metatools", "MP tool can't be runing: ") + unicode(sys.exc_info()[1])
                            )
        return
    finally:
        # Drop the temp error file and the temporary metafile.
        if os.path.exists(tempPath):
            os.remove(tempPath)
        if os.path.exists(temporaryMetafile):
            os.remove(temporaryMetafile)

    # show result
    from metatoolsviewer import MetatoolsViewer
    dlg = MetatoolsViewer()
    dlg.setHtml(result)
    dlg.setWindowTitle(QCoreApplication.translate("Metatools", "MP result"))
    dlg.exec_()
def get_tempnam():
    """Return the path of 'testresults.sqlite3' inside the system
    temporary directory.

    tempfile.gettempdir() gives the directory directly, replacing the
    old os.tempnam() call and the warnings filter its RuntimeWarning
    required (os.tempnam is removed in Python 3).
    """
    import tempfile
    return os.path.join(tempfile.gettempdir(), 'testresults.sqlite3')