def generate_minion_keys(self):
    """Generate a 2048-bit RSA keypair for this minion and record it.

    Writes the public key (and, when ``save_keys`` is configured, the
    private key) under ``self.pki_dir``, and stores raw and indented
    copies of both keys in ``self.config``.

    Returns:
        bool: True on success, False when no key name could be derived.
    """
    #XXX TODO: Replace M2Crypto with PyCrypto
    # see: https://github.com/saltstack/salt/pull/1112/files
    # generate keys
    keyname = self.get_keyname()
    if not keyname:
        LOG.error("Must specify salt_id or hostname")
        return False
    # NOTE(review): public exponent 1 is unusual (3 or 65537 is typical)
    # -- confirm intentional.  The callback silences M2Crypto's progress
    # output.
    gen = RSA.gen_key(2048, 1, callback=lambda x, y, z: None)
    pubpath = os.path.join(self.pki_dir, '{0}.pub'.format(keyname))
    gen.save_pub_key(pubpath)
    LOG.info("public key {0}".format(pubpath))
    if self.config.get('save_keys'):
        # umask 191 == 0o277: the saved .pem is readable by owner only.
        cumask = os.umask(191)
        gen.save_key(os.path.join(self.pki_dir, '{0}.pem'.format(keyname)), None)
        os.umask(cumask)
    # public key: round-trip through a temp file to obtain the PEM text
    _pub = TemporaryFile()
    bio_pub = BIO.File(_pub)
    m2.rsa_write_pub_key(gen.rsa, bio_pub._ptr())
    _pub.seek(0)
    self.config['public_key'] = self.public_key = _pub.read()
    # Indent every line so the key can be embedded in a config file.
    self.config['formatted_public_key'] = '\n'.join(
        " {0}".format(k) for k in self.public_key.split('\n'))
    # private key (unencrypted: passphrase argument is None)
    _pem = TemporaryFile()
    bio_pem = BIO.File(_pem)
    gen.save_key_bio(bio_pem, None)
    _pem.seek(0)
    self.config['private_key'] = self.private_key = _pem.read()
    self.config['formatted_private_key'] = '\n'.join(
        " {0}".format(k) for k in self.private_key.split('\n'))
    return True
def save_res_file(zip_path, result_obj, confirm_overwrite=False):
    """
    save results of a pose error metric (pe_metric) to a zip file
    :param zip_path: path to zip file
    :param result_obj: evo.core.result.Result instance
    :param confirm_overwrite: whether to require user interaction
           to overwrite existing files
    """
    from tempfile import TemporaryFile
    logger.debug("Saving results to " + zip_path + "...")
    if confirm_overwrite and not user.check_and_confirm_overwrite(zip_path):
        return
    with zipfile.ZipFile(zip_path, 'w') as archive:
        archive.writestr("info.json", json.dumps(result_obj.info))
        archive.writestr("stats.json", json.dumps(result_obj.stats))
        for name, array in result_obj.np_arrays.items():
            # 'with' guarantees the temp file is closed even if np.save
            # or writestr raises (the original leaked it in that case).
            with TemporaryFile() as tmp_file:
                np.save(tmp_file, array)
                tmp_file.seek(0)
                # NOTE(review): np.save writes .npy data although the
                # archive entry is named .npz -- kept for compatibility.
                archive.writestr("{}.npz".format(name), tmp_file.read())
        for name, traj in result_obj.trajectories.items():
            with TemporaryFile() as tmp_file:
                if type(traj) is PosePath3D:
                    fmt_suffix = ".kitti"
                    write_kitti_poses_file(tmp_file, traj)
                elif type(traj) is PoseTrajectory3D:
                    fmt_suffix = ".tum"
                    write_tum_trajectory_file(tmp_file, traj)
                else:
                    raise FileInterfaceException(
                        "unknown format of trajectory {}".format(name))
                tmp_file.seek(0)
                archive.writestr("{}{}".format(name, fmt_suffix),
                                 tmp_file.read())
def proxy_stdf():
    """
    Circulate stdout/stderr via a proper file object.

    Works around Python nose replacing sys.stdout/stderr with a custom
    'Tee' object that is not file-compatible (no usable fileno), which
    crashes Popen.  Yields real file objects; on exit, anything captured
    in a substitute temp file is replayed onto the original stream.
    """
    def _usable_or_tempfile(stream):
        # Keep the stream only if it exposes a working fileno().
        try:
            stream.fileno()
        except Exception:
            return TemporaryFile()
        return stream

    tmp_stdout = _usable_or_tempfile(sys.stdout)
    tmp_stderr = _usable_or_tempfile(sys.stderr)
    try:
        yield tmp_stdout, tmp_stderr
    finally:
        # Replay captured output back onto the real streams, stdout first.
        for captured, real in ((tmp_stdout, sys.stdout),
                               (tmp_stderr, sys.stderr)):
            if captured != real:
                captured.seek(0)
                real.write(captured.read().decode())
def htar(*args):
    """Run :command:`htar` with arguments.

    Parameters
    ----------
    args : :func:`tuple`
        Arguments to be passed to :command:`htar`.

    Returns
    -------
    :func:`tuple`
        The standard output and standard error from :command:`htar`.

    Raises
    ------
    KeyError
        If the :envvar:`HPSS_DIR` environment variable has not been set.
    """
    # Resolve the executable first: if get_hpss_dir() raises KeyError we
    # have not created any temp files yet (the original leaked two here,
    # and also leaked them on any later exception).
    path = get_hpss_dir()
    command = [os.path.join(path, 'htar')] + list(args)
    with TemporaryFile() as outfile, TemporaryFile() as errfile:
        call(command, stdout=outfile, stderr=errfile)
        outfile.seek(0)
        out = outfile.read()
        errfile.seek(0)
        err = errfile.read()
    return (out.decode('utf8'), err.decode('utf8'))
def run(self, instance: FilePath, solver: SolverConfig, timeout=None) -> TestResult:
    """Execute *solver* on *instance* and collect the outcome.

    Captures stdout/stderr into temp files, records a timeout when the
    solver reports one, and otherwise stores the solver result together
    with the summed iteration counts parsed from "Count : <n>" lines.
    """
    result = ExecutionTestResult(instance, solver)
    out_file = TemporaryFile()
    err_file = TemporaryFile()
    solver_result = solver.run(instance, out_file, err_file, timeout)
    if solver_result == SolverResult.TIMEOUT:
        result.timeout()
        return result
    out_file.seek(0)
    err_file.seek(0)
    output = out_file.read().decode() + err_file.read().decode()
    result.setSolverResult(solver_result, output.strip())
    # Sum every "Count : <n>" occurrence reported by the solver.
    iterations = sum(int(m.group(1))
                     for m in re.finditer(r"Count\s*:\s*(\d+)", output))
    if iterations > 0:
        result.setIterations(iterations)
    return result
def run_process(cmd, timeout=10):
    """Run *cmd* in a subprocess, enforcing *timeout* seconds.

    :param cmd: command as bytes, str or argv list
    :param timeout: seconds to wait; falsy means fire-and-forget
    :returns: (returncode, stdout, stderr); (None, None, None) when no
        timeout was requested; (-1, None, None) on spawn error or timeout
    """
    # Normalize bytes/str commands into an argv list.
    if isinstance(cmd, bytes):
        cmd = cmd.decode('utf-8')
    if isinstance(cmd, str):
        cmd = cmd.split()
    if not timeout:
        # Fire-and-forget: do not wait for the process at all.
        subprocess.Popen(cmd)
        return None, None, None
    try:
        out = TemporaryFile()
        err = TemporaryFile()
        prc = subprocess.Popen(cmd, stdout=out, stderr=err)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still works.
        LOG.exception('error in run_process %s' % cmd)
        return -1, None, None
    starttime = time.time()
    while True:
        if time.time() - starttime > timeout:
            LOG.error('run command %s timeout' % ' '.join(cmd))
            try:
                kill_prc(prc)
            except Exception:
                pass
            # Close the capture files (the original leaked them here).
            out.close()
            err.close()
            return -1, None, None
        if not alive(prc):
            out.flush()
            err.flush()
            out.seek(0)
            err.seek(0)
            return (prc.poll(),
                    out.read().decode('utf-8'),
                    err.read().decode('utf-8'))
        time.sleep(0.1)
def generate_minion_keys(self):
    """Generate a 2048-bit RSA keypair for this minion and record it.

    Writes the public key (and, when ``save_keys`` is configured, the
    private key) under ``self.pki_dir``, and stores raw and indented
    copies of both keys in ``self.config``.

    Returns:
        bool: True on success, False when no key name could be derived.
    """
    # XXX TODO: Replace M2Crypto with PyCrypto
    # see: https://github.com/saltstack/salt/pull/1112/files
    # generate keys
    keyname = self.get_keyname()
    if not keyname:
        LOG.error("Must specify salt_id or hostname")
        return False
    # NOTE(review): public exponent 1 is unusual (3 or 65537 is typical)
    # -- confirm intentional.  The callback silences progress output.
    gen = RSA.gen_key(2048, 1, callback=lambda x, y, z: None)
    pubpath = os.path.join(self.pki_dir, "{0}.pub".format(keyname))
    gen.save_pub_key(pubpath)
    LOG.info("public key {0}".format(pubpath))
    if self.config.get("save_keys"):
        # umask 191 == 0o277: the saved .pem is readable by owner only.
        cumask = os.umask(191)
        gen.save_key(os.path.join(self.pki_dir, "{0}.pem".format(keyname)), None)
        os.umask(cumask)
    # public key: round-trip through a temp file to obtain the PEM text
    _pub = TemporaryFile()
    bio_pub = BIO.File(_pub)
    m2.rsa_write_pub_key(gen.rsa, bio_pub._ptr())
    _pub.seek(0)
    self.config["public_key"] = self.public_key = _pub.read()
    # Indent every line so the key can be embedded in a config file.
    self.config["formatted_public_key"] = "\n".join(" {0}".format(k) for k in self.public_key.split("\n"))
    # private key (unencrypted: passphrase argument is None)
    _pem = TemporaryFile()
    bio_pem = BIO.File(_pem)
    gen.save_key_bio(bio_pem, None)
    _pem.seek(0)
    self.config["private_key"] = self.private_key = _pem.read()
    self.config["formatted_private_key"] = "\n".join(" {0}".format(k) for k in self.private_key.split("\n"))
    return True
def run_process(cmd, timeout=10):
    """Run *cmd* in a subprocess, enforcing *timeout* seconds.

    :param cmd: command as bytes, str or argv list
    :param timeout: seconds to wait; falsy means fire-and-forget
    :returns: (returncode, stdout, stderr); (None, None, None) when no
        timeout was requested; (-1, None, None) on spawn error or timeout
    """
    # Normalize bytes/str commands into an argv list.
    if isinstance(cmd, bytes):
        cmd = cmd.decode('utf-8')
    if isinstance(cmd, str):
        cmd = cmd.split()
    if not timeout:
        # Fire-and-forget: do not wait for the process at all.
        subprocess.Popen(cmd)
        return None, None, None
    try:
        out = TemporaryFile()
        err = TemporaryFile()
        prc = subprocess.Popen(cmd, stdout=out, stderr=err)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still works.
        LOG.exception('error in run_process %s' % cmd)
        return -1, None, None
    starttime = time.time()
    while True:
        if time.time() - starttime > timeout:
            LOG.error('run command %s timeout' % ' '.join(cmd))
            try:
                kill_prc(prc)
            except Exception:
                pass
            # Close the capture files (the original leaked them here).
            out.close()
            err.close()
            return -1, None, None
        if not alive(prc):
            out.flush()
            err.flush()
            out.seek(0)
            err.seek(0)
            return (prc.poll(),
                    out.read().decode('utf-8'),
                    err.read().decode('utf-8'))
        time.sleep(0.1)
def test_execute_commands(self): """Test executing arbitrary commands and logging their output.""" # All commands succeed. exp = (True, []) log_f = TemporaryFile(prefix=self.prefix, suffix='.txt') obs = _execute_commands(['echo foo', 'echo bar'], log_f, 1) self.assertEqual(obs, exp) exp = ("Command:\n\necho foo\n\nStdout:\n\nfoo\n\nStderr:\n\n\n" "Command:\n\necho bar\n\nStdout:\n\nbar\n\nStderr:\n\n\n") log_f.seek(0, 0) obs = log_f.read() self.assertEqual(obs, exp) # One command fails. exp = (False, []) log_f = TemporaryFile(prefix=self.prefix, suffix='.txt') obs = _execute_commands(['echo foo', 'foobarbaz'], log_f, 1) self.assertEqual(obs, exp) exp = ("Command:\n\necho foo\n\nStdout:\n\nfoo\n\nStderr:\n\n\n" "Command:\n\nfoobarbaz\n\nStdout:\n\n\nStderr:\n\n\n\n") log_f.seek(0, 0) obs = sub('Stderr:\n\n.*\n\n', 'Stderr:\n\n\n\n', log_f.read()) self.assertEqual(obs, exp)
class io:
    """Random-access byte store backed by an anonymous temporary file.

    NOTE: the class name shadows the stdlib ``io`` module; kept for
    backward compatibility with existing callers.
    """

    def __init__(self, data=b""):
        """Create the backing temp file, pre-filled with *data*.

        Accepts bytes or str (the original's str default crashed on
        Python 3, since the stream is opened in binary mode).
        """
        self.stream = TemporaryFile(mode="w+b")
        if isinstance(data, str):
            data = data.encode()
        self.stream.write(data)

    def __getitem__(self, key):
        """Return the single byte at offset *key* as a bytes object."""
        self.stream.seek(key)
        return self.stream.read(1)

    def __setitem__(self, key, item):
        """Overwrite the bytes at offset *key* with *item*."""
        self.stream.seek(key)
        self.stream.write(item)

    def __str__(self):
        """Return the full contents decoded as text."""
        self.stream.seek(0)
        return self.stream.read().decode()

    def __len__(self):
        # The original returned len(self): infinite recursion.  Report
        # the size of the backing file, preserving the read position.
        pos = self.stream.tell()
        self.stream.seek(0, 2)
        size = self.stream.tell()
        self.stream.seek(pos)
        return size

    def save(self, path):
        """Write the contents to *path* as text."""
        with open(path, "w+") as f:
            f.write(str(self))
def test_CommandExecutor(self): """Test executing arbitrary commands and logging their output.""" # All commands succeed. exp = (True, []) log_f = TemporaryFile(prefix=self.prefix, suffix='.txt') cmd_exec = CommandExecutor(['echo foo', 'echo bar'], log_f) obs = cmd_exec(1) self.assertEqual(obs, exp) exp = ("Command:\n\necho foo\n\nStdout:\n\nfoo\n\nStderr:\n\n\n" "Command:\n\necho bar\n\nStdout:\n\nbar\n\nStderr:\n\n\n") log_f.seek(0, 0) obs = log_f.read() self.assertEqual(obs, exp) # One command fails. exp = (False, []) log_f = TemporaryFile(prefix=self.prefix, suffix='.txt') cmd_exec = CommandExecutor(['echo foo', 'foobarbaz'], log_f) obs = cmd_exec(1) self.assertEqual(obs, exp) exp = ("Command:\n\necho foo\n\nStdout:\n\nfoo\n\nStderr:\n\n\n" "Command:\n\nfoobarbaz\n\nStdout:\n\n\nStderr:\n\n\n\n") log_f.seek(0, 0) obs = sub('Stderr:\n\n.*\n\n', 'Stderr:\n\n\n\n', log_f.read()) self.assertEqual(obs, exp)
def apply(self, req, proj):
    """Run this prototype on a new project.

    Each setup step runs in a forked child whose stdout/stderr are
    captured (both at the Python level and the fd level, so subprocesses
    are caught too) and recorded, with the step's return value, in the
    tracforge_project_log table.

    NOTE: If you pass in a project that isn't new, this could explode.
    Don't do that.
    """
    from api import TracForgeAdminSystem
    steps = TracForgeAdminSystem(self.env).get_project_setup_participants()
    # Clear log entries left over from any previous run of this project.
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute("DELETE FROM tracforge_project_log WHERE project=%s", (proj.name,))
    db.commit()
    for step in self:
        # A step is either {"action": ..., "args": ...} or an
        # (action, args) pair.
        action = args = None
        if isinstance(step, dict):
            action = step["action"]
            args = step["args"]
        else:
            action, args = step
        pid = os.fork()
        if not pid:
            # Child process: redirect output into unbuffered temp files.
            # NOTE(review): `bufsize` is the Python 2 TemporaryFile
            # keyword (Python 3 renamed it to `buffering`).
            # o_fd, o_file = mkstemp('tracforge-step', text=True)
            # e_fd, e_file = mkstemp('tracforge-step', text=True)
            o_file = TemporaryFile(prefix="tracforge-step", bufsize=0)
            e_file = TemporaryFile(prefix="tracforge-step", bufsize=0)
            sys.stdout = o_file
            sys.stderr = e_file
            os.dup2(o_file.fileno(), 1)
            os.dup2(e_file.fileno(), 2)
            rv = steps[action]["provider"].execute_setup_action(req, proj, action, args)
            self.env.log.debug("TracForge: %s() => %r", action, rv)
            o_file.seek(0, 0)
            o_data = o_file.read()
            o_file.close()
            e_file.seek(0, 0)
            e_data = e_file.read()
            e_file.close()
            # The child opens its own DB connection to record the outcome.
            db = self.env.get_db_cnx()
            cursor = db.cursor()
            cursor.execute(
                "INSERT INTO tracforge_project_log (project, action, args, return, stdout, stderr) VALUES (%s, %s, %s, %s, %s, %s)",
                (proj.name, action, args, int(rv), o_data, e_data),
            )
            db.commit()
            db.close()
            # _exit skips atexit/stdio cleanup inherited from the parent.
            os._exit(0)
        # Parent: wait for the child before running the next step.
        os.waitpid(pid, 0)
def generate_calberson_file(self, cr, uid, ids, context=None):
    """Build the fixed-width Calberson carrier file for the expeditions
    in *ids* and return it base64-encoded.

    Returns a dict with 'calberson_data' (base64 file content) and
    'calberson_name_file' (the fixed name "calberson.anc").
    """
    # Temp file accumulating the fixed-width records.
    tmpf = TemporaryFile('w+')
    # NOTE(review): this initial read of an empty file looks redundant
    # -- `data` is recomputed after the loop.
    data = base64.encodestring(tmpf.read())
    for id_exp in ids:
        exp = self.pool.get('davidts.expedition').browse(cr, uid, id_exp, context=context)
        # Country code of the delivery address, when one is set.
        if exp.expedition_adr.country.id:
            code_country = self.pool.get('res.country').browse(
                cr, uid, exp.expedition_adr.country.id, context).code
        else:
            code_country = False
        # Field names encode the 1-based column offset in the record
        # (cl1 = column 1, cl8 = column 8, ...); cNNN variables hold
        # space padding between fields.
        cl1 = '8796522'
        cl8 = self.adapt(cr, uid, ids, exp.name, 9, context)
        cl17 = ""
        for i in range(21):
            cl17 = cl17 + " "
        cl38 = self.adapt(cr, uid, ids, exp.expedition_adr.name, 35, context)
        c73 = ""
        for i in range(35):
            c73 = c73 + " "
        cl108 = self.adapt(cr, uid, ids, exp.expedition_adr.street, 35, context)
        c143 = ""
        for i in range(10):
            c143 = c143 + " "
        cl153 = self.adapt(cr, uid, ids, code_country, 2, context)
        cl155 = self.adapt(cr, uid, ids, exp.expedition_adr.zip, 5, context)
        c160 = ""
        for i in range(5):
            c160 = c160 + " "
        cl165 = self.adapt(cr, uid, ids, exp.expedition_adr.city, 35, context)
        c200 = ""
        for i in range(85):
            c200 = c200 + " "
        cl285 = self.adapt(cr, uid, ids, exp.package_nb, 3, context)
        cl288 = self.adapt(cr, uid, ids, exp.palette_nb, 3, context)
        cl291 = self.adapt(cr, uid, ids, exp.total_weight, 5, context)
        c296 = ""
        for i in range(231):
            c296 = c296 + " "
        cl528 = self.adapt(cr, uid, ids, exp.note, 35, context)
        # NOTE(review): cl288 appears twice in this concatenation, which
        # shifts every later field by 3 columns -- suspected bug; confirm
        # against the Calberson record layout before changing.
        line = cl1 + cl8 + cl17 + cl38 + c73 + cl108 + c143 + cl153 + cl155 + c160 + cl165 + c200 + cl285 + cl288 + cl288 + cl291 + c296 + cl528
        file_name = "calberson.anc"
        tmpf.write(str(line))
    tmpf.seek(0)
    data = base64.encodestring(tmpf.read())
    return {'calberson_data': data, 'calberson_name_file': "calberson.anc"}
def apply(self, req, proj):
    """Run this prototype on a new project.

    Each setup step runs in a forked child whose stdout/stderr are
    captured (both at the Python level and the fd level, so subprocesses
    are caught too) and recorded, with the step's return value, in the
    tracforge_project_log table.

    NOTE: If you pass in a project that isn't new, this could explode.
    Don't do that.
    """
    from api import TracForgeAdminSystem
    steps = TracForgeAdminSystem(self.env).get_project_setup_participants()
    # Clear log entries left over from any previous run of this project.
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    cursor.execute('DELETE FROM tracforge_project_log WHERE project=%s', (proj.name, ))
    db.commit()
    for step in self:
        # A step is either {'action': ..., 'args': ...} or an
        # (action, args) pair.
        action = args = None
        if isinstance(step, dict):
            action = step['action']
            args = step['args']
        else:
            action, args = step
        pid = os.fork()
        if not pid:
            # Child process: redirect output into unbuffered temp files.
            # NOTE(review): `bufsize` is the Python 2 TemporaryFile
            # keyword (Python 3 renamed it to `buffering`).
            #o_fd, o_file = mkstemp('tracforge-step', text=True)
            #e_fd, e_file = mkstemp('tracforge-step', text=True)
            o_file = TemporaryFile(prefix='tracforge-step', bufsize=0)
            e_file = TemporaryFile(prefix='tracforge-step', bufsize=0)
            sys.stdout = o_file
            sys.stderr = e_file
            os.dup2(o_file.fileno(), 1)
            os.dup2(e_file.fileno(), 2)
            rv = steps[action]['provider'].execute_setup_action(
                req, proj, action, args)
            self.env.log.debug('TracForge: %s() => %r', action, rv)
            o_file.seek(0, 0)
            o_data = o_file.read()
            o_file.close()
            e_file.seek(0, 0)
            e_data = e_file.read()
            e_file.close()
            # The child opens its own DB connection to record the outcome.
            db = self.env.get_db_cnx()
            cursor = db.cursor()
            cursor.execute(
                'INSERT INTO tracforge_project_log (project, action, args, return, stdout, stderr) VALUES (%s, %s, %s, %s, %s, %s)',
                (proj.name, action, args, int(rv), o_data, e_data))
            db.commit()
            db.close()
            # _exit skips atexit/stdio cleanup inherited from the parent.
            os._exit(0)
        # Parent: wait for the child before running the next step.
        os.waitpid(pid, 0)
def main():
    """Smoke-test ./miniShell via pexpect: built-ins, PATH lookup,
    foreground/background jobs, redirection, SIGINT and quit.

    NOTE: Python 2 source (print statements); expects TMP_FILE and
    sleep to be defined at module level.
    """
    shell = pexpect.spawn("./miniShell")
    shell.logfile = sys.stdout
    # Test echo
    shell.sendline("echo hello")
    shell.expect("hello.*", timeout=2)
    # Test pwd
    shell.sendline("pwd")
    shell.expect("{}.*".format(os.getcwd()), timeout=2)
    # Test cd -- capture real `ls -l ..` output now for comparison with
    # the shell's redirected output later.
    t = TemporaryFile()
    subprocess.call(["ls", "-l", ".."], stdout=t)
    t.seek(0)
    shell.sendline("cd ..")
    shell.sendline("ls -l")
    shell.expect(".*" + "drwxrwxr" + ".*", timeout=2)
    # Test non-existent file
    shell.sendline("loldontexist")
    shell.expect(".*in the PATH.*", timeout=2)
    # Test foreground waiting
    shell.sendline("firefox")
    shell.expect(".*EECE315.*", timeout=5)
    # Test background running
    shell.sendline("firefox &")
    shell.expect(".*EECE315.*", timeout=2)
    # Test file redirection
    shell.sendline("cd miniShell")
    if os.path.exists(TMP_FILE):
        os.remove(TMP_FILE)
    shell.sendline("ls -l .. > {}".format(TMP_FILE))
    sleep(2)
    assert os.path.exists(TMP_FILE)
    with open(TMP_FILE, "r") as f:
        f_str = f.read().strip()
    t.seek(0)
    print t.read()
    t.seek(0)
    print f_str
    # The shell's redirected output must match the real `ls -l ..`.
    assert t.read().strip() == f_str
    # Test SIGINT handler (chr(3) is Ctrl-C)
    shell.sendline(chr(3))
    # Test quit (fresh shell, since the previous one may have died)
    shell = pexpect.spawn("./miniShell")
    shell.logfile = sys.stdout
    shell.sendline("quit")
def shell(cmd, stdin=None, stdout_as_debug=False, capture_stdout=True):
    """Execute *cmd* through the system shell.

    Returns the captured stdout stream (or None when stdout is not
    captured).  Raises `errors.StepFailure` when the command exits
    non-zero.
    """
    if isinstance(cmd, list):
        cmd = " ".join(cmd)
    if stdout_as_debug:
        cmd += ">&2"
    assert isinstance(cmd, str)
    log.debug(cmd)
    log.debug(f"Stdin is `{type(stdin)}`")

    # Decide where the child's streams go.  In debug mode stderr (and,
    # unless needed for capture, stdout) streams to the terminal;
    # otherwise both are spooled to temp files for error reporting.
    if is_debug():
        stdout = TemporaryFile() if capture_stdout else None
        stderr = None
    else:
        stdout = TemporaryFile()
        stderr = TemporaryFile()

    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        env=os.environ,
    )
    proc.wait()

    if stdout:
        stdout.seek(0)
    if proc.returncode:
        if stderr:
            stderr.seek(0)
        out_text = stdout.read().decode("UTF-8") if stdout else "No stdout captured."
        err_text = stderr.read().decode("UTF-8") if stderr else "No stderr captured."
        raise errors.StepFailure(cmd, out_text, err_text)
    return stdout
class CaqeRunTestCase(unittest.TestCase):
    """One test case per QCIR instance: run the caqe solver on the file
    and grade its exit code against the expected result embedded in the
    instance's 'r SAT' / 'r UNSAT' line."""

    def __init__(self, testmethod, test_name, qcir_file, options=None):
        super(CaqeRunTestCase, self).__init__(testmethod)
        self.qcir_file = qcir_file
        self.name = test_name
        # Expected outcome; stays None when the file declares no result.
        self.expected = None
        # Solver output is captured into temp files for later grading.
        self.stdout = TemporaryFile()
        self.stderr = TemporaryFile()
        self.options = options
        # Scan the instance for its declared result.  NOTE(review): the
        # file handle is never closed explicitly; relies on GC.
        for line in open(qcir_file, 'r'):
            if 'r SAT' in line:
                self.expected = RESULT_SAT
            elif 'r UNSAT' in line:
                self.expected = RESULT_UNSAT

    def id(self):
        """Unique test identifier, including solver options if any."""
        if not self.options:
            return "CaqeRunTestCase.{}".format(self.name)
        else:
            return "CaqeRunTestCase.{} {}".format(self.name, ' '.join(self.options))

    def test_run(self):
        """Run the solver and grade its exit code plus combined output."""
        returncode = run_caqe(self.qcir_file, self.stdout, self.stderr, options=self.options)
        self.stdout.seek(0)
        self.stderr.seek(0)
        self.evaluate_results(
            returncode,
            self.stdout.read().decode() + self.stderr.read().decode())

    def evaluate_results(self, returncode, output):
        """Map (returncode, expected) onto pass/skip/fail."""
        result = returncode
        # A definite answer contradicting the expectation is WRONG, or
        # UNKNOWN when the instance declared no expected result.
        if returncode in [RESULT_SAT, RESULT_UNSAT] and returncode != self.expected:
            if self.expected is None:
                result = RESULT_UNKNOWN
            else:
                result = RESULT_WRONG
        # Unknown/timeout outcomes skip; wrong/crashed outcomes fail
        # with the solver's output attached.
        if returncode == RESULT_UNKNOWN:
            self.skipTest(reason=result_mapping[returncode])
        elif result in [RESULT_UNKNOWN, RESULT_TIMEOUT]:
            self.skipTest(reason=result_mapping[result])
        elif result in [RESULT_WRONG, RESULT_CRASHED]:
            self.fail(result_mapping[result] + '\n\nOutput:' + output)
        self.assertIn(result, [RESULT_SAT, RESULT_UNSAT])
def save_submission(conf, valid_repr, test_repr):
    """
    Create a submission file given a configuration dictionary and a
    representation for valid and test.

    Parameters
    ----------
    conf : WRITEME
    valid_repr : WRITEME
    test_repr : WRITEME
    """
    logger.info('... creating zipfile')

    # Make sure the output directory exists and really is a directory.
    submit_dir = conf['savedir']
    if not os.path.exists(submit_dir):
        os.makedirs(submit_dir)
    elif not os.path.isdir(submit_dir):
        raise IOError('savedir %s is not a directory' % submit_dir)

    basename = os.path.join(submit_dir,
                            conf['dataset'] + '_' + conf['expname'])

    # With more features than examples, ship kernel (Gram) matrices
    # instead of the raw representations.
    if valid_repr.shape[1] > valid_repr.shape[0]:
        valid_repr = numpy.dot(valid_repr, valid_repr.T)
        test_repr = numpy.dot(test_repr, test_repr.T)

    # Quantize the values into [0, 999].
    valid_repr = numpy.floor((valid_repr / valid_repr.max()) * 999)
    test_repr = numpy.floor((test_repr / test_repr.max()) * 999)

    # Spool both representations through temp files, then re-read them
    # into the zip archive.
    valid_file, test_file = TemporaryFile(), TemporaryFile()
    numpy.savetxt(valid_file, valid_repr, fmt="%.3f")
    numpy.savetxt(test_file, test_repr, fmt="%.3f")
    valid_file.seek(0)
    test_file.seek(0)

    submission = zipfile.ZipFile(basename + ".zip", "w",
                                 compression=zipfile.ZIP_DEFLATED)
    submission.writestr(basename + '_valid.prepro', valid_file.read())
    submission.writestr(basename + '_final.prepro', test_file.read())
    submission.close()
    valid_file.close()
    test_file.close()
def temporary_file():
    """Demonstrate TemporaryFile vs NamedTemporaryFile.

    Prints samples read back from an anonymous temp file, then the name
    of a named temp file that survives closing.
    """
    from tempfile import TemporaryFile, NamedTemporaryFile

    # TemporaryFile has no name on the file system, so it cannot be
    # found by other processes; it is only accessible in this process.
    # Using 'with' closes it deterministically (the original leaked it).
    with TemporaryFile() as tmp:
        tmp.write(b'abdef' * 100000)
        tmp.seek(0)
        print(tmp.read(100))
        print('-' * 20)
        print(tmp.read(100))

    # NamedTemporaryFile does have a file-system name.  With
    # delete=False the file is not removed on close, so other processes
    # can open it as well.
    named_temp_file = NamedTemporaryFile(delete=False)
    try:
        named_temp_file.write(b'abc' * 100)
        print(named_temp_file.name)
    finally:
        named_temp_file.close()
def save_submission(conf, valid_repr, test_repr):
    """
    Create a submission file given a configuration dictionary and a
    representation for valid and test.

    NOTE: Python 2 source (print statement).

    Parameters
    ----------
    conf : WRITEME
    valid_repr : WRITEME
    test_repr : WRITEME
    """
    print '... creating zipfile'
    # Ensure the given directory is correct
    submit_dir = conf['savedir']
    if not os.path.exists(submit_dir):
        os.makedirs(submit_dir)
    elif not os.path.isdir(submit_dir):
        raise IOError('savedir %s is not a directory' % submit_dir)
    basename = os.path.join(submit_dir, conf['dataset'] + '_' + conf['expname'])
    # If there are too much features, outputs kernel matrices
    # (i.e. when features outnumber examples, ship Gram matrices).
    if (valid_repr.shape[1] > valid_repr.shape[0]):
        valid_repr = numpy.dot(valid_repr, valid_repr.T)
        test_repr = numpy.dot(test_repr, test_repr.T)
    # Quantitize data into [0, 999]
    valid_repr = numpy.floor((valid_repr / valid_repr.max())*999)
    test_repr = numpy.floor((test_repr / test_repr.max())*999)
    # Store the representations in two temporary files
    valid_file = TemporaryFile()
    test_file = TemporaryFile()
    numpy.savetxt(valid_file, valid_repr, fmt="%.3f")
    numpy.savetxt(test_file, test_repr, fmt="%.3f")
    # Reread those files and put them together in a .zip
    valid_file.seek(0)
    test_file.seek(0)
    submission = zipfile.ZipFile(basename + ".zip", "w",
                                 compression=zipfile.ZIP_DEFLATED)
    # NOTE(review): the archive member names include the full basename
    # path, so entries inside the zip carry the directory prefix --
    # confirm the grader expects that.
    submission.writestr(basename + '_valid.prepro', valid_file.read())
    submission.writestr(basename + '_final.prepro', test_file.read())
    submission.close()
    valid_file.close()
    test_file.close()
def command(cmd, timeout=120):
    """Run *cmd* in a shell and return its combined stdout/stderr.

    Polls once per second for up to *timeout* seconds.  A non-zero exit
    status raises EmulatorActionException carrying the captured output;
    expiry kills the process and raises EmulatorTimeoutException.
    """
    tmp = TemporaryFile()
    try:
        proc = subprocess.Popen(cmd, shell=True, stdout=tmp, stderr=tmp)
        while timeout > 0:
            ret = proc.poll()
            if ret is not None:
                tmp.seek(0)
                output = tmp.read()
                if ret:
                    raise EmulatorActionException(output)
                return output
            time.sleep(1)
            timeout -= 1
        proc.kill()
        raise EmulatorTimeoutException
    finally:
        # The original never closed the capture file; close it on every
        # exit path (the output has already been read where needed).
        tmp.close()
def do_POST(self):
    """Execute a JSON-described command and reply with its result.

    Request body: JSON with 'args' (argv list or command string),
    optional 'stdin' (text fed to the child) and optional 'shell' flag.
    Response: JSON with 'returncode', 'stdout' and 'stderr', or
    {'error': traceback} with HTTP 500 on any failure.

    NOTE(review): Python 2 era -- writes str payloads into binary temp
    files and to wfile; under Python 3 these would need encoding.
    """
    try:
        request = loads(
            self.rfile.read(int(self.headers['Content-Length'])))
        # Optional stdin is staged in a temp file so the child receives
        # a real file descriptor.
        if 'stdin' in request:
            stdin = TemporaryFile()
            stdin.write(request['stdin'])
            stdin.seek(0)
        else:
            stdin = None
        stdout = TemporaryFile()
        stderr = TemporaryFile()
        shell = request['shell'] if 'shell' in request else False
        returncode = call(request['args'], stdin=stdin, stdout=stdout, stderr=stderr, shell=shell)
        if stdin != None:
            stdin.close()
        stdout.seek(0)
        stderr.seek(0)
        response_code = 200
        response_body = {
            'returncode': returncode,
            'stdout': stdout.read(),
            'stderr': stderr.read()
        }
        stdout.close()
        stderr.close()
    except:
        # Any failure (bad JSON, missing args, exec error) becomes a
        # 500 with the traceback as the body.
        response_code = 500
        response_body = {'error': format_exc()}
    self.send_response(response_code)
    self.send_header('Content-Type', 'application/json')
    self.end_headers()
    self.wfile.write(dumps(response_body))
    self.wfile.write('\n')
class Response:
    """Minimal parsed HTTP response.

    The constructor parses only the status line and headers from the
    raw bytes; the body is held in ``self.fd``, which this class never
    writes itself -- NOTE(review): confirm how callers populate fd.
    """

    def __init__(self, data):
        data = data.decode('utf8')
        # Split the status line off the header block.
        method, hdata = data.split('\r\n', 1)
        code = method.split(' ', 2)
        self.version = code[0]  # e.g. "HTTP/1.1"
        self.code = code[1]     # status code, kept as a string
        self.reason = code[2]   # reason phrase (may contain spaces)
        self.headers = Headers(hdata)
        # Spool file that holds the (binary) response body.
        self.fd = TemporaryFile('w+b')

    def content(self):
        """Return the body, decoded per the Content-Type charset when
        one is declared and decodable, otherwise as raw bytes.

        NOTE(review): reads from fd's *current* position and rewinds
        afterwards -- assumes the caller left fd positioned at the
        start of the body; confirm that contract.
        """
        data = self.fd.read()
        self.fd.seek(0)
        encoding = self.header_encoding()
        if encoding is None:
            return data
        try:
            return data.decode(encoding)
        except UnicodeDecodeError as e:
            # Undecodable body: fall back to the raw bytes.
            return data

    def header_encoding(self):
        """Return the charset declared in Content-Type, or None."""
        ctype = self.headers.get('content-type')
        if ctype is not None:
            return parse_header(ctype)[1].get('charset')
def numpy_to_string(array):
    """Convert numpy array into human-readable string.

    Good for passing to other programs.

    Notes:
        human-readable string example:
            1 2 3
            4 5 6
        is a string for the following array:
            [[1,2,3]
             [4,5,6]]

    Args:
        array (numpy): array to convert to human-readable string

    Returns:
        human-readable string of array
    """
    # TemporaryFile is binary on Python 3, so savetxt's output must be
    # decoded to honor the documented "string" contract (the original
    # returned bytes and also leaked the file handle).
    f = TemporaryFile()
    try:
        np.savetxt(f, array, fmt='%.8g')
        f.seek(0)
        return f.read().decode()
    finally:
        f.close()
def pdf_workup(request, pk):
    """Render workup *pk* as a downloadable PDF.

    Staff-only: providers whose active type lacks staff_view are
    redirected back to the workup detail page.
    """
    wu = get_object_or_404(models.Workup, pk=pk)
    active_provider_type = get_object_or_404(ProviderType, pk=request.session['clintype_pk'])
    if active_provider_type.staff_view:
        # Render the workup-body template, then convert the HTML to PDF
        # via xhtml2pdf (pisa) through a binary temp file.
        data = {'workup': wu}
        template = get_template('workup/workup_body.html')
        html = template.render(data)
        file = TemporaryFile(mode="w+b")
        pisa.CreatePDF(html.encode('utf-8'), dest=file, encoding='utf-8')
        file.seek(0)
        pdf = file.read()
        file.close()
        # Filename: patient initials plus the zero-padded clinic date,
        # e.g. "JD (03.09.2020)".
        initials = ''.join(name[0].upper() for name in wu.patient.name(reverse=False, middle_short=False).split())
        formatdate = '.'.join([str(wu.clinic_day.clinic_date.month).zfill(2),
                               str(wu.clinic_day.clinic_date.day).zfill(2),
                               str(wu.clinic_day.clinic_date.year)])
        filename = ''.join([initials, ' (', formatdate, ')'])
        response = HttpResponse(pdf, 'application/pdf')
        response["Content-Disposition"] = "attachment; filename=%s.pdf" % (filename,)
        return response
    else:
        return HttpResponseRedirect(reverse('workup', args=(wu.id,)))
def graph(request, type, show_name):
    """Render a statistics graph for *show_name* as a PNG HTTP response.

    *type* selects the plot (weekday / hours / weekday_hours /
    time_per_episode).  Rendering is serialized through graph_lock
    because pyplot's global state is not thread-safe.
    """
    if not has_matplotlib:
        return HttpResponse("matplotlib missing")
    graph = None
    # TODO: get cached graph
    if not graph:
        graph_lock.acquire()
        # try/finally guarantees the lock is released even when plotting
        # raises -- the original would deadlock every later request.
        try:
            tmpfile = TemporaryFile()
            figure = pyplot.figure(1, figsize=(4, 3))
            if type == "weekday":
                _weekday_graph(show_name)
            elif type == "hours":
                _hours_graph(show_name)
            elif type == "weekday_hours":
                _weekday_hours_graph(show_name)
            elif type == "time_per_episode":
                _time_per_episode_graph(show_name, figure)
            pyplot.savefig(tmpfile, format="png")
            pyplot.close(figure)
            pyplot.clf()
            tmpfile.seek(0)
            graph = tmpfile.read()
            tmpfile.close()
        finally:
            graph_lock.release()
    return HttpResponse(graph, content_type="image/png")
def test_title(self):
    """The terminfo formatter must emit the xterm title escape
    sequence (OSC 0 ... BEL) for title()."""
    stream = TemporaryFile()
    formatter = formatters.TerminfoFormatter(stream, 'xterm+sl', True, 'ascii')
    formatter.title('TITLE')
    stream.seek(0)
    written = stream.read()
    self.assertEqual(compatibility.force_bytes('\x1b]0;TITLE\x07'), written)
def cxx(self):
    """Run a compiled C/C++ submission against self.ifile and grade it.

    Sets self.result: 0 accepted, -4 output-size limit, -5 killed
    (cpu/mem limit), -6 segfault, -3 other fatal signal, -7 wrong
    answer.
    """
    ofile=TemporaryFile('w+t')
    # Binary path depends on whether this run is for the official
    # answer (self.ua) or a user submission.
    if self.ua:
        bin=ANSWER_PATH+self.id+'/x'+self.id
    else:
        bin=BINARY_PATH+self.id+'/x'+self.id
    # Limiter applies cpu/memory limits in the child before exec.
    p=Popen(bin,stdin=self.ifile,stdout=ofile,universal_newlines=True,
            preexec_fn=Tester.Limiter(self.lcpu,self.lmem),stderr=DEVNULL)
    p.wait()
    self.result=0
    # Negative return codes encode the signal that killed the child.
    if p.returncode==-9:       # SIGKILL: resource limit exceeded
        self.result=-5
    elif p.returncode==-11:    # SIGSEGV
        self.result=-6
    elif p.returncode==-25:    # signal 25 (SIGXFSZ on Linux): output too big
        self.result=-4
    elif p.returncode<0:       # any other fatal signal
        self.result=-3
    else:
        # Normal exit: compare produced output with the expected output
        # element-wise (clear() presumably strips irrelevant characters
        # -- TODO confirm its semantics).
        ofile.seek(0)
        out = clear(list(self.output.strip()))
        ans = clear(list(ofile.read(-1).strip()))
        len1, len2 = len(out), len(ans)
        if len1 == len2:
            for i in range(len1):
                if str(out[i]) != str(ans[i]):
                    self.result=-7
                    print("Wrong Answer, len1 == len2")
                    break
        else:
            self.result=-7
            print("Wrong Answer, len1 != len2")
        print ("output:\n%s" % out)
        print ("ofile:\n%s" % ans)
def dataentry(self):
    """Write self.data back to a temporary file and sanity-check the
    rewritten size against the original stream.

    Raises Exception when the sizes differ by more than the reported
    block padding; the rewritten bytes are then dumped to debug.cgf for
    inspection.  Returns False so the toaster does not recurse further.
    """
    self.toaster.msgblockbegin("writing to temporary file")
    f_tmp = TemporaryFile()
    try:
        total_padding = self.data.write(f_tmp)
        # comparing the files will usually be different because blocks may
        # have been written back in a different order, so cheaply just compare
        # file sizes
        self.toaster.msg("comparing file sizes")
        # seek(0, 2) positions both streams at EOF so tell() yields size.
        self.stream.seek(0, 2)
        f_tmp.seek(0, 2)
        if self.stream.tell() != f_tmp.tell():
            self.toaster.msg("original size: %i" % self.stream.tell())
            self.toaster.msg("written size: %i" % f_tmp.tell())
            self.toaster.msg("padding: %i" % total_padding)
            # Acceptable only when the file grew by at most the padding;
            # shrinking or growing beyond it is a write error.
            if self.stream.tell() > f_tmp.tell() or self.stream.tell() + total_padding < f_tmp.tell():
                f_tmp.seek(0)
                f_debug = open("debug.cgf", "wb")
                f_debug.write(f_tmp.read(-1))
                f_debug.close()
                raise Exception('write check failed: file sizes differ by more than padding')
    finally:
        f_tmp.close()
    self.toaster.msgblockend()
    # spell is finished: prevent recursing into the tree
    return False
def image_register(request):
    """Register an image by URL and store its feature descriptor.

    Accepts `url` via GET or POST.  Computes a descriptor from the
    locally cached copy of the image and saves it (serialized numpy
    bytes) on the Images row.  Responds 400 when `url` is missing and
    412 when no local file is available.
    """
    params = request.GET if request.method == "GET" else request.POST
    if not ("url" in params):
        # Error message (Japanese): "parameter `url` is not specified".
        content = {"message": u"パラメータ`url`が指定されていません"}
        return Response(content, status=status.HTTP_400_BAD_REQUEST)
    # Fetch the existing row, or start a fresh one for this URL.
    try:
        image = Images.objects.get(url=params["url"])
    except ObjectDoesNotExist:
        image = Images(url=params["url"], adult_flag=False, grotesque_flag=False)
    descriptor = factory.descriptor(filepath=image.local_path)
    if descriptor == None:
        # Error message (Japanese): "image not found locally".
        content = {"message": u"ローカルに画像が見つかりません"}
        return Response(content, status=status.HTTP_412_PRECONDITION_FAILED)
    else:
        # Serialize the numpy descriptor through a temp file and store
        # the raw .npy bytes on the model.
        tmp = TemporaryFile()
        try:
            np.save(tmp, descriptor)
            tmp.seek(0)
            image.description = tmp.read()
            image.save()
        finally:
            tmp.close()
        return Response(ImageMapper(image).as_dict())
def test_one_key_per_block_writer(self):
    """With block_size=10 the IndexWriter must emit one key per block:
    30 bytes total -- a root block plus two leaf blocks.

    NOTE(review): compares str literals against the stream contents,
    which assumes Python 2 (binary TemporaryFile yields bytes on 3).
    """
    # 2 pointers and a 1 byte string null terminated string = 10 bytes
    stream = TemporaryFile()
    i = IndexWriter(stream, block_size=10, terminator='\0')
    i.add(0, 'b')
    eq_(len(i.indexes), 1)
    i.add(0, 'c')
    eq_(len(i.indexes), 2)
    i.finish()
    stream.seek(0)
    packet = stream.read()
    eq_(len(packet), 30)
    # Each block: 4-byte pointer, key + NUL terminator, 4-byte pointer.
    # The root block is keyed on 'c' and points at the leaf blocks.
    root_block = packet[:10]
    eq_(root_block, '\x01\x00\x00\x00c\x00\x02\x00\x00\x00')
    block_1 = packet[10:20]
    eq_(block_1, '\x03\x00\x00\x00b\x00\x04\x00\x00\x00')
    block_2 = packet[20:]
    eq_(block_2, '\x04\x00\x00\x00c\x00\x05\x00\x00\x00')
def __init__(self, oauth, version, installation_step, purge_on_delete=False): self.version = version # Construct and set the package_zip file if self.version.number: self.package_zip = PackageZipBuilder( self.version.package.namespace, self.version.number).install_package() elif self.version.zip_url or self.version.repo_url: if self.version.repo_url: repo_url = self.version.repo_url git_ref = self.version.branch if installation_step.installation.git_ref: git_ref = installation_step.installation.git_ref if installation_step.installation.fork: repo_url_parts = repo_url.split('/') repo_url_parts[3] = installation_step.installation.fork repo_url = '/'.join(repo_url_parts) zip_url = '%s/archive/%s.zip' % (repo_url, git_ref) else: zip_url = self.version.zip_url # Deploy a zipped bundled downloaded from a url try: zip_resp = requests.get(zip_url) except: raise ValueError('Failed to fetch zip from %s' % self.version.zip_url) zipfp = TemporaryFile() zipfp.write(zip_resp.content) zipfile = ZipFile(zipfp, 'r') if not self.version.subfolder and not self.version.repo_url: zipfile.close() zipfp.seek(0) self.package_zip = base64.b64encode(zipfp.read()) else: ignore_prefix = '' if self.version.repo_url: # Get the top level folder from the zip ignore_prefix = '%s/' % zipfile.namelist()[0].split('/')[0] # Extract a subdirectory from the zip subdirectory = ignore_prefix + self.version.subfolder subzip = zip_subfolder(zipfile, subdirectory, self.version.namespace_token, self.version.namespace) subzipfp = subzip.fp subzip.close() subzipfp.seek(0) self.package_zip = base64.b64encode(subzipfp.read()) super(ApiInstallVersion, self).__init__(oauth, self.package_zip, installation_step, purge_on_delete)
def pyc(self):
    """Run a Python submission (`python <id>.pyc`) against self.ifile
    and grade it.

    Sets self.result: 0 accepted, -4 output-size limit, -5 killed
    (cpu/mem limit), -6 segfault, -3 other fatal signal, -7 wrong
    answer.
    """
    ofile=TemporaryFile('w+t')
    # Bytecode path depends on whether this run is for the official
    # answer (self.ua) or a user submission.
    if self.ua:
        dst=ANSWER_PATH+self.id+'.pyc'
    else:
        dst=BINARY_PATH+self.id+'.pyc'
    cmd=['python',dst]
    # Limiter applies cpu/memory limits in the child before exec.
    p=Popen(cmd,stdin=self.ifile,stdout=ofile,universal_newlines=True,
            preexec_fn=Tester.Limiter(self.lcpu,self.lmem),stderr=DEVNULL)
    p.wait()
    self.result=0
    # Negative return codes encode the signal that killed the child.
    if p.returncode==-9:       # SIGKILL: resource limit exceeded
        self.result=-5
    elif p.returncode==-11:    # SIGSEGV
        self.result=-6
    elif p.returncode==-25:    # signal 25 (SIGXFSZ on Linux): output too big
        self.result=-4
    elif p.returncode<0:       # any other fatal signal
        self.result=-3
    else:
        # Normal exit: exact string comparison with the expected output.
        ofile.seek(0)
        if self.output!=ofile.read(-1):
            self.result=-7
        pass
def _ftpGet(self):
    """Fetch self._url over anonymous FTP and return its content bytes.

    Returns None (after printing a notice) when the transfer times out.
    """
    _, path = self._url.split("://")
    parts = path.split("/")
    host = parts[0]
    directory = "/".join(parts[1:-1])
    # Renamed from `file`, which shadowed the builtin.
    filename = parts[-1]
    try:
        ftp = FTP(host, timeout=60)
        try:
            ftp.login()
            ftp.cwd(directory)
            # 'with' closes the spool file even if the transfer raises
            # (the original leaked both it and the FTP connection).
            with TemporaryFile() as tmpfile:
                ftp.retrbinary("RETR " + filename, tmpfile.write)
                tmpfile.seek(0)
                data = tmpfile.read()
            return data
        finally:
            ftp.close()
    except TimeoutError:
        print("Timeout while fetching {}".format(self._url))
        return None
def pdf_workup(request, pk):
    """Render workup *pk* as a downloadable PDF.

    Staff-only: providers whose active type lacks staff_view are
    redirected back to the workup detail page.
    """
    wu = get_object_or_404(models.Workup, pk=pk)
    active_provider_type = get_object_or_404(ProviderType, pk=request.session['clintype_pk'])
    if active_provider_type.staff_view:
        # Render the workup-body template, then convert the HTML to PDF
        # via xhtml2pdf (pisa) through a binary temp file.
        data = {'workup': wu}
        template = get_template('workup/workup_body.html')
        html = template.render(data)
        file = TemporaryFile(mode="w+b")
        pisa.CreatePDF(html.encode('utf-8'), dest=file, encoding='utf-8')
        file.seek(0)
        pdf = file.read()
        file.close()
        # Filename: patient initials plus the zero-padded clinic date,
        # e.g. "JD (03.09.2020)".
        initials = ''.join(name[0].upper() for name in wu.patient.name(
            reverse=False, middle_short=False).split())
        formatdate = '.'.join([
            str(wu.clinic_day.clinic_date.month).zfill(2),
            str(wu.clinic_day.clinic_date.day).zfill(2),
            str(wu.clinic_day.clinic_date.year)
        ])
        filename = ''.join([initials, ' (', formatdate, ')'])
        response = HttpResponse(pdf, 'application/pdf')
        response["Content-Disposition"] = "attachment; filename=%s.pdf" % (
            filename, )
        return response
    else:
        return HttpResponseRedirect(reverse('workup', args=(wu.id, )))
def dataentry(self):
    """Write self.data back to a temporary file and sanity-check the
    rewritten size against the original stream.

    Raises Exception when the sizes differ by more than the reported
    block padding; the rewritten bytes are then dumped to debug.cgf for
    inspection.  Returns False so the toaster does not recurse further.
    """
    self.toaster.msgblockbegin("writing to temporary file")
    f_tmp = TemporaryFile()
    try:
        total_padding = self.data.write(f_tmp)
        # comparing the files will usually be different because blocks may
        # have been written back in a different order, so cheaply just compare
        # file sizes
        self.toaster.msg("comparing file sizes")
        # seek(0, 2) positions both streams at EOF so tell() yields size.
        self.stream.seek(0, 2)
        f_tmp.seek(0, 2)
        if self.stream.tell() != f_tmp.tell():
            self.toaster.msg("original size: %i" % self.stream.tell())
            self.toaster.msg("written size: %i" % f_tmp.tell())
            self.toaster.msg("padding: %i" % total_padding)
            # Acceptable only when the file grew by at most the padding;
            # shrinking or growing beyond it is a write error.
            if self.stream.tell() > f_tmp.tell(
            ) or self.stream.tell() + total_padding < f_tmp.tell():
                f_tmp.seek(0)
                f_debug = open("debug.cgf", "wb")
                f_debug.write(f_tmp.read(-1))
                f_debug.close()
                raise Exception(
                    'write check failed: file sizes differ by more than padding'
                )
    finally:
        f_tmp.close()
    self.toaster.msgblockend()
    # spell is finished: prevent recursing into the tree
    return False
def getUpdateElements(self, valueMap):
    """Serialize ``valueMap`` into a string of XML property elements.

    :param valueMap: mapping of property names to values; a key may be a
        plain string (then qualified with ``self.defaultNameSpace``) or a
        ``(namespace, name)`` tuple.  Truthy string values are emitted as
        CDATA; ``qp_xml._element`` values are re-serialized; falsy values
        produce an empty element.
    :return: concatenated ``<tag>value</tag>`` fragments.
    """
    elements = []
    for name in valueMap.keys():
        fullname = name
        # ``str`` is what the old ``types.StringType`` aliased on
        # Python 2, and it also works on Python 3.
        if isinstance(name, str):
            fullname = (self.defaultNameSpace, name)
        if not fullname[0]:
            tag = fullname[1]
        else:
            tag = self.shortcuts[fullname[0]] + ':' + fullname[1]
        value = valueMap[name]
        if value:
            if isinstance(value, qp_xml._element):
                # Round-trip the element through a temp file and drop the
                # first line (the XML declaration emitted by qp_xml).
                # Fix: the original never closed this file.
                with TemporaryFile('w+') as tmpFile:
                    qp_xml.dump(tmpFile, value)
                    tmpFile.flush()
                    tmpFile.seek(0)
                    tmpFile.readline()
                    value = tmpFile.read()
            else:
                value = "<![CDATA[%s]]>" % value
        else:
            value = ""
        # join() instead of the original quadratic string +=
        elements.append("<%s>%s</%s>" % (tag, value, tag))
    return "".join(elements)
class CandidateUploadFile(BaseHandler):
    """Tornado handler that mirrors a candidate's remote photo into static storage.

    The remote file is streamed into a ``TemporaryFile`` chunk by chunk and
    only copied to disk once the whole download has completed.
    """

    def initialize(self):
        # Per-request scratch buffer, filled by streaming_callback.
        self.tempfile = TemporaryFile()

    @tornado.web.authenticated
    @granted()
    @tornado.web.asynchronous
    def post(self):
        # The client supplies the source URL plus metadata about the file.
        fp_url = self.get_argument("url")
        mime_type = self.get_argument("data[type]")
        size = int(self.get_argument("data[size]"))
        candidate_id = self.get_argument("id")
        self.candidate = self.db.query(Candidate).get(int(candidate_id))
        logging.info("type: %s, size: %r", mime_type, size)
        # Only JPEGs under the size cap are fetched asynchronously;
        # anything else is rejected immediately with status 0.
        # NOTE(review): the size check trusts the client-reported size,
        # not the actual downloaded byte count — confirm that is intended.
        if mime_type == "image/jpeg" and size < MAX_UPLOAD_SIZE:
            http_client = tornado.httpclient.AsyncHTTPClient()
            request = tornado.httpclient.HTTPRequest(url=fp_url, streaming_callback=self.streaming_callback)
            http_client.fetch(request, self.on_download)
        else:
            self.finish(dict(status=0))

    def streaming_callback(self, data):
        # Invoked for each chunk of the HTTP body; append it to the buffer.
        self.tempfile.write(data)
        logging.info("This is the streaming_callback file tell function: %r", self.tempfile.tell())

    def on_download(self, response):
        # Download finished: copy the buffered bytes to the profile image
        # path keyed by the candidate's backup_id, then report the new src.
        img_path = os.path.join(os.path.dirname(__file__), "static/profiles/img/" + str(self.candidate.backup_id) + '.jpg')
        self.tempfile.seek(0)
        ptr = open(img_path, 'wb')
        ptr.write(self.tempfile.read())
        ptr.close()
        self.tempfile.close()
        self.finish(dict(src="/static/profiles/img/" + str(self.candidate.backup_id) + '.jpg', status=1))
def draw(self):
    """Render the chart as a GIF and return the image bytes.

    The chart writes itself into a temporary file which is read back;
    the context manager closes the handle the original implementation
    leaked.  (Docstring translated from German.)
    """
    from tempfile import TemporaryFile
    with TemporaryFile() as buf:
        self.chart.draw(buf)
        buf.seek(0)
        return buf.read()
def download(self, search_term):
    """Search Flickr for *search_term* and label the hits with Rekognition.

    Fetches up to 5 medium-size photos, runs AWS Rekognition label
    detection (min confidence 50) on each, and writes the generated HTML
    to ``report.html``.
    """
    flickr = FlickrAPI(self.API_KEY, self.SHARED_SECRET, format='parsed-json')
    # Renamed from ``list`` — never shadow the builtin.
    search_result = flickr.photos.search(text=search_term, per_page=5, extras='url_m')
    photos = search_result['photos']
    for photo in photos['photo']:
        # Skip photos Flickr returned without a medium-size URL.
        if 'url_m' not in photo:
            continue
        url = photo['url_m']
        req = requests.get(url, stream=True)
        with TemporaryFile() as tfile:
            tfile.write(req.content)
            tfile.seek(0)
            client = boto3.client('rekognition')
            response = client.detect_labels(Image={'Bytes': tfile.read()},
                                            MinConfidence=50)
        # NOTE(review): this overwrites self.report on every iteration, so
        # only the last photo's report survives — presumably accumulation
        # was intended; confirm with the author before changing behavior.
        self.report = self.generate_report(photo['title'], response['Labels'], url)
    self.report += "</body></html>"
    with open('report.html', 'w') as report_html:
        report_html.write(self.report)
def get_registry_proto(self):
    """Download the serialized feature registry from S3 and parse it.

    :return: a populated ``RegistryProto``
    :raises S3RegistryBucketNotExist: when head_bucket returns 404
    :raises S3RegistryBucketForbiddenAccess: for any other head_bucket error
    :raises FileNotFoundError: when downloading the registry object fails
    """
    file_obj = TemporaryFile()
    registry_proto = RegistryProto()
    # botocore is an optional extra; map a missing install to the
    # feast-specific dependency error.
    try:
        from botocore.exceptions import ClientError
    except ImportError as e:
        from feast.errors import FeastExtrasDependencyImportError

        raise FeastExtrasDependencyImportError("aws", str(e))
    try:
        bucket = self.s3_client.Bucket(self._bucket)
        self.s3_client.meta.client.head_bucket(Bucket=bucket.name)
    except ClientError as e:
        # If a client error is thrown, then check that it was a 404 error.
        # If it was a 404 error, then the bucket does not exist.
        error_code = int(e.response["Error"]["Code"])
        if error_code == 404:
            raise S3RegistryBucketNotExist(self._bucket)
        else:
            raise S3RegistryBucketForbiddenAccess(self._bucket) from e

    try:
        obj = bucket.Object(self._key)
        obj.download_fileobj(file_obj)
        file_obj.seek(0)
        registry_proto.ParseFromString(file_obj.read())
        return registry_proto
    except ClientError as e:
        # NOTE(review): any ClientError here (not only a missing key) is
        # reported as "not found" — confirm that is intended.
        raise FileNotFoundError(
            f"Error while trying to locate Registry at path {self._uri.geturl()}"
        ) from e
class PackageZipBuilder(object):
    """Builds base64-encoded metadata deployment zips for a managed package.

    ``install_package`` produces a zip that installs ``namespace`` at
    ``version`` via InstalledPackage; ``uninstall_package`` produces the
    matching destructive-changes zip.
    """

    def __init__(self, namespace, version=None):
        self.namespace = namespace
        self.version = version

    def open_zip(self):
        """Start a fresh zip archive backed by a temporary file."""
        self.zip_file = TemporaryFile()
        self.zip = ZipFile(self.zip_file, 'w')

    def install_package(self):
        """Return the encoded zip that installs this package version."""
        self.open_zip()
        if not self.version:
            raise ValueError('You must provide a version to install a package')
        self.zip.writestr('package.xml', PACKAGE_XML % self.namespace)
        self.zip.writestr(
            'installedPackages/%s.installedPackage' % self.namespace,
            INSTALLED_PACKAGE % self.version)
        return self.encode_zip()

    def uninstall_package(self):
        """Return the encoded zip that removes this package."""
        self.open_zip()
        self.zip.writestr('package.xml', EMPTY_PACKAGE_XML)
        self.zip.writestr('destructiveChanges.xml', PACKAGE_XML % self.namespace)
        return self.encode_zip()

    def encode_zip(self):
        """Finalize the archive and return its contents base64-encoded."""
        self.zip.close()
        self.zip_file.seek(0)
        return b64encode(self.zip_file.read())
def sendKey(id, ipfshash, address):
    """Fetch a purchased copyright key from IPFS and e-mail it as an attachment.

    Returns a short status string describing success or the SMTP failure.
    """
    # Buffer the key bytes from IPFS through a temporary file.
    key_buffer = TemporaryFile('w+b')
    key_buffer.write(ipfsclient.cat(str(ipfshash)))
    key_buffer.seek(0)

    # Assemble the multipart mail: a short notice plus the key file.
    text_part = MIMEText('尊敬的版权购买者,这是您购买的id为%s版权,请查收。' % (id))
    key_part = MIMEApplication(key_buffer.read())
    key_part.add_header('Content-Disposition', 'attachment',
                        filename='%s_%s.key' % (id, ipfshash))

    mail = MIMEMultipart()
    mail.attach(text_part)
    mail.attach(key_part)
    mail['Subject'] = Header('您的版权密钥——区块链音乐版权平台', 'utf-8')
    mail['from'] = EMAIL_HOST_USER
    mail['to'] = address

    try:
        server = smtplib.SMTP()
        server.connect(EMAIL_HOST, 25)
        server.login(EMAIL_HOST_USER, EMAIL_HOST_PASSWORD)
        server.sendmail(EMAIL_HOST_USER, address, mail.as_string())
        server.quit()
        key_buffer.close()
        return 'email send success'
    except smtplib.SMTPException as e:
        key_buffer.close()
        return 'email send error%s' % e
def execute_local(args, env=None, zerobyte=False):
    """
    Execute a command locally.

    This method is a wrapper for :py:class:`subprocess.Popen`
    with stdout and stderr piped to temporary files and
    ``shell=True``.

    :param str args: command with arguments (e.g. 'sbatch myjob.sh')
    :param dict env: environment variables (default: {})
    :param bool zerobyte: split stdout on NUL bytes instead of
        returning raw lines
    :return: object with attributes ``stdout``, ``stderr`` \
    and ``returncode``
    :rtype: :py:obj:`object`
    """
    from tempfile import TemporaryFile
    from subprocess import Popen
    handle = type('Handle', (object, ), {
        'stdout': [],
        'stderr': [],
        'returncode': 0
    })()
    # Note: PIPE would deadlock if output is larger than 65K, hence the
    # temp files.  Context managers close them (the original leaked both).
    with TemporaryFile("w+") as stdout, TemporaryFile("w+") as stderr:
        p = Popen(args, stdout=stdout, stderr=stderr, env=env, shell=True)
        p.wait()
        stdout.seek(0)
        if zerobyte:
            handle.stdout = stdout.read().split('\0')
        else:
            handle.stdout = stdout.readlines()
        stderr.seek(0)
        handle.stderr = stderr.readlines()
    handle.returncode = p.returncode
    return handle
def shell_command(cmd, directory):
    """
    Execute a command in a shell

    cmd : the command as a string
    dir : the directory where the cmd is executed

    Output : status
    """
    from subprocess import Popen, STDOUT, PIPE

    capture = TemporaryFile()
    darwin = platform.system() == 'Darwin'
    # On macOS stderr stays separate via PIPE and status is the
    # communicate() result; elsewhere stderr is merged into the captured
    # stdout stream and status is the exit code from wait().
    proc = Popen(cmd, shell=True, cwd=directory, stdin=PIPE,
                 stdout=capture,
                 stderr=PIPE if darwin else STDOUT)
    status = proc.communicate() if darwin else proc.wait()

    capture.seek(0)
    captured = capture.read()
    capture.close()
    return status, captured
def sandbox_helper(sandbox: Sandbox, command, privileged=False):
    """Run *command* inside the sandbox and return (stdout, stderr) as text."""
    stdout, stderr = TemporaryFile("wb+"), TemporaryFile("wb+")
    sandbox.execute(command=command, stdin_fd=None, stdout_fd=stdout,
                    stderr_fd=stderr, privileged=privileged)

    def _drain(stream):
        # Flush, rewind, decode and strip one capture file, then close it.
        stream.flush()
        stream.seek(0)
        text = stream.read().decode().strip()
        stream.close()
        return text

    stdout_text = _drain(stdout)
    stderr_text = _drain(stderr)

    # If running java or javac or jar the JVM prints an annoying message:
    # "Picked up JAVA_TOOL_OPTIONS: <actual options set by sandbox environment>
    # Remove it from the captured output if it is there.
    if any(java in command for java in ["java", "javac", "jar"]):
        def _without_jvm_noise(text):
            return "\n".join(
                line for line in text.splitlines()
                if not line.startswith("Picked up JAVA_TOOL_OPTIONS")
            )
        stdout_text = _without_jvm_noise(stdout_text)
        stderr_text = _without_jvm_noise(stderr_text)
    return stdout_text, stderr_text
def install_package_version(request, namespace, number):
    """Deploy a package version to the user's org via the Metadata API.

    Looks up the version, logs the install, builds (or downloads and
    validates) the deployment zip, fires the SOAP deploy call and returns
    the async process id as JSON.
    """
    oauth = request.session.get('oauth_response', None)
    if not oauth:
        # Fix: the original *raised* an HttpResponse, which is a TypeError
        # at runtime; a Django view must return it.
        return HttpResponse('Unauthorized', status=401)

    version = get_object_or_404(PackageVersion, package__namespace=namespace, number=number)

    # Log the install
    install = PackageInstallation(
        package=version.package,
        version=version,
        action='install',
        username=oauth['username'],
        org_id=oauth['org_id'],
        org_type=oauth['org_type'],
        status='Starting',
    )
    install.save()

    request.session['mpinstaller_current_install'] = install.id

    endpoint = build_endpoint_url(oauth)

    # If we have a version number, install via a custom built metadata
    # package using InstalledPackage
    if version.number:
        # Build a zip for the install package
        package_zip = PackageZipBuilder(namespace, number).install_package()
    else:
        try:
            zip_resp = requests.get(version.zip_url)
            zipfp = TemporaryFile()
            try:
                zipfp.write(zip_resp.content)
                # Open/close once to validate the archive before shipping.
                zipfile = ZipFile(zipfp, 'r')
                zipfile.close()
                zipfp.seek(0)
                package_zip = base64.b64encode(zipfp.read())
            finally:
                # Fix: the original leaked the temp file handle.
                zipfp.close()
            # FIXME: Implement handling of the subdir field
        except Exception:
            # Narrowed from a bare ``except:`` (which also swallowed
            # KeyboardInterrupt/SystemExit).
            raise ValueError('Failed to fetch zip from %s' % version.zip_url)

    # Construct the SOAP envelope message
    message = SOAP_DEPLOY % {'package_zip': package_zip}
    message = message.encode('utf-8')

    headers = {
        'Content-Type': "text/xml; charset=UTF-8",
        'Content-Length': len(message),
        'SOAPAction': 'deploy',
    }

    response = call_mdapi(request, url=endpoint, headers=headers, data=message)
    id = parseString(response.content).getElementsByTagName('id')[0].firstChild.nodeValue

    # Delete the cached org package versions
    if request.session.get('mpinstaller_org_packages', None) is not None:
        del request.session['mpinstaller_org_packages']

    return HttpResponse(json.dumps({'process_id': id}), content_type='application/json')
def index_html (self, icon=0, preview=0, width=None, height=None, REQUEST=None):
    """ Return the file with it's corresponding MIME-type """
    if REQUEST is not None:
        # Conditional-GET / redirect handling: either may short-circuit
        # the request with an empty body.
        if self._if_modified_since_request_handler(REQUEST):
            self.ZCacheable_set(None)
            return ''
        if self._redirect_default_view_request_handler(icon, preview, REQUEST):
            return ''
    # Resolve which file (main file, icon or preview) to serve.
    filename, content_type, icon, preview = self._get_file_to_serve(icon, preview)
    filename = self._get_fsname(filename)
    if _debug > 1:
        logger.info('serving %s, %s, %s, %s' %(filename, content_type, icon, preview))
    if filename:
        size = os.stat(filename)[6]
    else:
        # Missing file: fall back to the "broken" icon.
        filename = self._get_icon_file(broken=True)
        size = os.stat(filename)[6]
        content_type = 'image/gif'
        icon = 1
    if icon==0 and width is not None and height is not None:
        # Resize the image on the fly with PIL into a temp file.
        data = TemporaryFile() # hold resized image
        try:
            from PIL import Image
            im = Image.open(filename)
            if im.mode!='RGB':
                im = im.convert('RGB')
            # ``filter`` shadows the builtin here; left as-is.
            filter = Image.BICUBIC
            if hasattr(Image, 'ANTIALIAS'): # PIL 1.1.3
                filter = Image.ANTIALIAS
            im = im.resize((int(width),int(height)), filter)
            im.save(data, 'JPEG', quality=85)
        except:
            # NOTE(review): bare except — any PIL failure silently falls
            # back to serving the original file, with ``size`` still set
            # from the file on disk.  Confirm that is intended.
            data = open(filename, 'rb')
        else:
            # Resize succeeded: size/type now describe the JPEG buffer.
            data.seek(0,2)
            size = data.tell()
            data.seek(0)
            content_type = 'image/jpeg'
    else:
        data = open(filename, 'rb')
    if REQUEST is not None:
        # Full response path: set caching/type/length headers and stream.
        last_mod = rfc1123_date(self._p_mtime)
        REQUEST.RESPONSE.setHeader('Last-Modified', last_mod)
        REQUEST.RESPONSE.setHeader('Content-Type', content_type)
        REQUEST.RESPONSE.setHeader('Content-Length', size)
        self.ZCacheable_set(None)
        return stream_iterator(data)
    # No REQUEST (e.g. direct call): return the raw bytes and close.
    try:
        return data.read()
    finally:
        data.close()
def run(self):
    """Run the command.

    Executes ``self.to_exec`` through the shell, capturing stdout and
    stderr in temporary files (closed deterministically — the original
    leaked both handles).

    :return: dict with ``command``, ``return_code``, ``stdout``, ``stderr``
    :raises RxcCommandException: when the command was never insuflated
    """
    if not self.to_exec:
        raise RxcCommandException('Command {0} has not been insuflated'.format(self.command_id))
    with TemporaryFile() as stdout, TemporaryFile() as stderr:
        return_code = call(self.to_exec, shell=True, stdout=stdout, stderr=stderr)
        stdout.flush()
        stderr.flush()
        stdout.seek(0)
        stderr.seek(0)
        return {
            'command' : self.to_exec,
            'return_code' : return_code,
            'stdout' : stdout.read(),
            'stderr' : stderr.read()
        }
def _pickleMessage(self, message): t = TemporaryFile() pickle.dump(message, t) t.seek(0) str_msg = t.read() str_len = len(str_msg) message = "%-8d" % (str_len,) + str_msg return message
def execute(self,args):
    """ useful function available to recipes """
    out_file = TemporaryFile()
    err_file = TemporaryFile()
    proc = Popen(args, stdout=out_file, stderr=err_file,
                 cwd=self.push.checkout_dir(), close_fds=True)
    ret = proc.wait()

    # Rewind and drain both capture files, then release the handles.
    out_file.seek(0)
    err_file.seek(0)
    stdout = out_file.read()
    stderr = err_file.read()
    out_file.close()
    err_file.close()

    # Persist the invocation and its output alongside this push stage.
    log_entry = Log(pushstage=self, command=" ".join(args),
                    stdout=stdout, stderr=stderr)
    return (ret, stdout, stderr)
def output_highlighter(file):
    """Return *file*'s text with known key words wrapped in highlight markup.

    Literal ``\\n`` sequences are expanded to a real newline padded to
    column 9, then every key word in ``_KEY_WORDS`` is replaced by its
    ``_BASE`` markup template.
    """
    # Text mode ("w+") fixes the Python 3 TypeError the original hit by
    # writing str into a default-binary TemporaryFile; the context
    # manager also closes the handle instead of leaking it.
    with TemporaryFile("w+") as tmp_file:
        for line in file:
            line = line.replace('\\n', '\n'.ljust(9))
            for key_word in _KEY_WORDS:
                line = line.replace(key_word, _BASE.format(key_word.lower(), key_word))
            tmp_file.write(line)
        tmp_file.seek(0)
        return tmp_file.read()
def test_title(self):
    """Terminfo formatter should emit the xterm title-set escape sequence."""
    stream = TemporaryFile()
    try:
        formatter = formatters.TerminfoFormatter(stream, 'xterm+sl', True, 'ascii')
    except curses.error:
        # Terminal description unavailable on this host.
        raise SkipTest("xterm+sl not in terminfo db")
    formatter.title('TITLE')
    stream.seek(0)
    self.assertEqual(b'\x1b]0;TITLE\x07', stream.read())
def run_playbook(self, yml, should_fail=False):
    """Run *yml* as an ansible playbook inside the test workspace.

    :param yml: playbook text, written to ``playbook.yml``
    :param should_fail: when True a non-zero exit is expected and the
        captured output is returned; otherwise the failure is re-raised
        after printing the output for diagnosis.
    :return: captured ansible output when ``should_fail`` intercepts a
        failure, else ``None`` on success.
    """
    playbook_path = self.dir.write([self.workspace, 'playbook.yml'], yml)
    inventory_path = os.path.join(test_dir, 'inventory')
    # Capture combined output in a temp file; the context manager closes
    # it on every path (the original leaked the handle).
    with TemporaryFile() as output:
        try:
            check_call(
                ['ansible-playbook', '-vv', '-i', inventory_path, playbook_path],
                cwd=self.workspace_path,
                stdout=output
            )
        except CalledProcessError:
            output.seek(0)
            if should_fail:
                return output.read()
            else:
                print('cwd:', self.workspace_path)
                print(output.read())
                raise