def setup_pkg(virtualenv, pkg_dir, options, test=True, indent_txt='', deps=True):
    """ Runs ``setup.py develop`` for a source checkout inside a virtualenv.

        Parameters
        ----------
        virtualenv : `str`
            virtualenv base dir
        pkg_dir : `str`
            package checkout dir
        options : `optparse.Options`
            command-line options; ``no_action`` and ``verbose`` are honoured
        test : `bool`
            when False, passes ``--no-test`` to skip test packages
        indent_txt : `str`
            prefix for log messages
        deps : `bool`
            when False, passes ``--no-deps`` to skip dependencies
    """
    get_log().info("%s Setting up package in %s" % (indent_txt, pkg_dir))
    if getattr(options, 'no_action', False):
        return
    interpreter = os.path.join(virtualenv, 'bin', 'python')
    args = [interpreter, 'setup.py']
    # -v must precede the 'develop' command so distutils sees it globally
    if options.verbose:
        args += ['-v']
    args += ['develop']
    if not test:
        args += ['--no-test']
    if getattr(options, 'prefer_final', False):
        args += ['--prefer-final']
    if not deps:
        args += ['--no-deps']
    with cmdline.chdir(pkg_dir):
        cmdline.run(args, capture_stdout=not options.verbose)
def install_pkg(virtualenv, pkg, options=DEFAULT_OPTIONS, version=None, allow_source_package=False):
    """ Installs a package into the given virtualenv.

        Parameters
        ----------
        virtualenv : `str`
            Virtualenv base path
        pkg : `str`
            Package name
        options : `optparse.Options`
            Command-line options
        version : `str`
            Package version, defaults to latest available
        allow_source_package : `bool`
            If the package is set up as a source package in the current
            runtime context, set it up from there; otherwise use
            easy_install.
    """
    if allow_source_package:
        matches = [dist for dist in pkg_resources.working_set
                   if dist.project_name == pkg]
        # A non-egg location means this is a source checkout we can develop
        if matches and not matches[0].location.endswith('.egg'):
            setup_pkg(virtualenv, matches[0].location, options)
            return
    requirement = '%s==%s' % (pkg, version) if version else pkg
    get_log().info("Installing requirement %s" % requirement)
    if getattr(options, 'no_action', False):
        return
    # Using easy_install as an argument here to get around #! path limits
    cmdline.run([os.path.join(virtualenv, 'bin', 'python'),
                 os.path.join(virtualenv, 'bin', 'easy_install'),
                 requirement])
def checkout_pkg(dest_dir, pypi, pkg, options, branch='trunk', indent_txt='', vcs='svn'):
    """ Checks out a package by name to a specified dest dir

        Parameters
        ----------
        dest_dir : `str`
            Destination dir
        pypi : `pkglib.pypi.PyPi`
            PyPi API
        pkg : `str`
            Package Name
        options : `optparse.Options`
            Cmdline options
        branch : `str`
            VCS branch name
        indent_txt : `str`
            Prefix for log messages
        vcs : `str`
            Version control system; only 'svn' is supported

        Raises
        ------
        ValueError
            If ``vcs`` is not a supported version control system
    """
    if os.path.isfile(os.path.join(dest_dir, 'setup.py')):
        get_log().info("%s %s already checked out" % (indent_txt, pkg))
        return
    uri = pypi.get_vcs_uri(pkg)
    if vcs == 'svn':
        uri = '%s/%s' % (uri, branch)
        # Bug fix: 'svn' and 'co' were one concatenated literal ('svnco')
        # due to a missing comma, so the command could never run.
        cmd = ['svn', 'co', uri, dest_dir]
    else:
        raise ValueError("Unsupported vcs: {0}".format(vcs))
    get_log().info("%s Checking out %s from %s" % (indent_txt, pkg, uri))
    if getattr(options, 'no_action', False):
        return
    cmdline.run(cmd, capture_stdout=not options.verbose)
def test_cmdline_run_logs_stdout_on_failure():
    # A failing command with captured stdout should log the output
    # alongside the failure message.
    failing_cmd = get_python_cmd('sys.stdout.write("stdout")', exit_code=1)
    with patch('pkglib_util.cmdline.get_log') as log_mock:
        with raises(CalledProcessError):
            cmdline.run(failing_cmd, capture_stdout=True)
        expected = 'Command failed: "%s"\nstdout' % " ".join(failing_cmd)
        log_mock.return_value.error.assert_called_with(expected)
def test_cmdline_run_passes_stdout_if_not_captured():
    # When stdout is not captured, Popen should inherit the parent's stdout
    # (stdout=None) while stderr is folded into it.
    with patch('subprocess.Popen') as popen_mock:
        proc = popen_mock.return_value
        proc.returncode = 0
        proc.communicate.return_value = (None, '')
        cmdline.run(sentinel.cmd, capture_stdout=False, capture_stderr=True)
        popen_mock.assert_called_with(sentinel.cmd, stdin=None,
                                      stdout=None, stderr=STDOUT)
def test_cmdline_run_passes_stderr_if_not_captured():
    # Run cmdline.run in a child interpreter so the un-captured stderr of
    # the grandchild propagates up to our pipe.
    cmd = get_python_cmd('sys.stderr.write("stderr")')
    quoted = ", ".join("'%s'" % arg for arg in cmd)
    script = """from pkglib_util import cmdline
cmdline.run([%s], capture_stderr=False)""" % quoted
    child = subprocess.Popen([sys.executable, '-c', script], stderr=PIPE)
    _, err = child.communicate()
    assert child.returncode == 0
    assert 'stderr' in err.decode('utf-8')
def test_cmdline_run_logs_stderr_on_failure():
    # A failing command with captured stderr should log exactly one error
    # whose message starts with the failure line plus the stderr output.
    failing_cmd = get_python_cmd('sys.stderr.write("stderr")', exit_code=1)
    with patch('pkglib_util.cmdline.get_log') as log_mock:
        with raises(CalledProcessError):
            cmdline.run(failing_cmd, capture_stderr=True)
        prefix = 'Command failed: "%s"\nstderr' % " ".join(failing_cmd)
        error_mock = log_mock.return_value.error
        assert error_mock.call_count == 1
        assert error_mock.call_args[0][0].startswith(prefix)
def mirror_eggs(self, file_root, target_host, target_root, target_packages=None, subprocesses=10):
    """ Mirrors egg files from this PyPi instance to a target host and path.
        Used for filling out a cache that can be used by
        CONFIG.installer_search_path.

        Parameters
        ----------
        file_root : `str`
            filesystem path to the root of the file store
        target_host : `str`
            host to mirror to
        target_root : `str`
            filesystem path to mirror to on target host
        target_packages : `list` or None
            list of packages to mirror. Use None for all.
        subprocesses : `int`
            number of subprocesses to spawn when doing the mirror
    """
    pkg_dirs, target_dirs = self.get_mirror_targets(file_root, target_root,
                                                    target_packages)
    work = []
    # Bug fix: collect every mirrored file; the original passed only the
    # *last* loop iteration's `files` to unpack_eggs.
    all_files = []
    for pkg in pkg_dirs:
        # Filter non-egg and dev packages out, as this is a site-packages
        # mirror which won't work with source packages.
        files = [os.path.join(pkg, f) for f in os.listdir(pkg)
                 if os.path.isfile(os.path.join(pkg, f))
                 and f.endswith('egg') and 'dev' not in f]
        print("Found %s (%d files)" % (os.path.basename(pkg), len(files)))
        if files:
            all_files.extend(files)
            mirror_dir = self.get_mirror_dirname(os.path.basename(pkg))
            cmd = ['/usr/bin/rsync', '-av', '--ignore-existing']
            cmd.extend(os.path.abspath(i).strip() for i in files)
            cmd.append(os.path.join(target_host + ':' + target_root,
                                    mirror_dir))
            work.append(cmd)
    if work:
        print("Creating target root dirs")
        run(['/usr/bin/ssh', target_host,
             'mkdir -p ' + ' '.join(target_dirs)])
        # Using multiprocessing here to multiplex the transfers
        if subprocesses > 1:
            pool = Pool(processes=subprocesses)
            pool.map(run, work)
        else:
            # Explicit loop: a bare map() is lazy on Python 3 and would
            # silently do nothing.
            for cmd in work:
                run(cmd)
        self.unpack_eggs(all_files, target_host, target_root)
    else:
        print("Nothing to do.")
def run_with_coverage(cmd, pytestconfig, coverage=None, cd=None, **kwargs):
    """ Run a given command with coverage enabled. This won't make any sense
        if the command isn't a python script.

        Must be run within a pytest session that was set up with the
        '--cov=xxx' options; the configuration is read from the standard
        ``pytestconfig`` funcarg.

        Parameters
        ----------
        cmd: `List`
            Command to run
        pytestconfig: `pytest._config.Config`
            Pytest configuration object
        coverage: `str`
            Path to the coverage executable
        cd: `str`
            If not None, will change to this directory before running the
            cmd. This is the directory that the coverage files will be
            created in.
        kwargs: keyword arguments
            Any extra arguments to pass to `pkglib.cmdline.run`

        Returns
        -------
        `str`
            standard output

        Examples
        --------
        >>> def test_example(pytestconfig):
        ...     cmd = ['python','myscript.py']
        ...     run_with_coverage(cmd, pytestconfig)
    """
    if isinstance(cmd, str):
        cmd = [cmd]
    # Default to the coverage module of the current interpreter
    if coverage is None:
        coverage = [sys.executable, '-mcoverage.__main__']
    elif isinstance(coverage, str):
        coverage = [coverage]
    run_args = list(coverage) + ['run', '-p']
    if pytestconfig.option.cov_source:
        run_args.append('--source=%s'
                        % ",".join(pytestconfig.option.cov_source))
    run_args.extend(cmd)
    if cd:
        with cmdline.chdir(cd):
            return cmdline.run(run_args, **kwargs)
    return cmdline.run(run_args, **kwargs)
def run_graph_easy(entries, renderer, outfile=None):
    """ Given the path edge entries, run the graphing tools and produce the
        output.

        Parameters
        ----------
        entries : `list`
            Path edges
        renderer : `str`
            One of 'ascii', 'boxart' or 'graphviz'
        outfile : `str`
            File to save to, only for graphviz. If None, it will delete the
            generated file.
    """
    if not CONFIG.graph_easy:
        log.warn("Graph-Easy not configured, please set graph_easy variable in pkglib config")
        return
    from path import path
    if renderer == 'graphviz' and not os.getenv('DISPLAY'):
        log.info("No DISPLAY set, using ascii renderer")
        renderer = 'ascii'
    instream = '\n'.join(entries)
    if os.path.isfile(CONFIG.graph_easy / 'bin' / 'graph-easy'):
        with cmdline.chdir(CONFIG.graph_easy / 'lib'):
            if renderer == 'graphviz':
                delete = False
                if not outfile:
                    tmpdir = path(tempfile.mkdtemp())
                    outfile = tmpdir / 'depgraph.png'
                    delete = True
                outfile = path(outfile).abspath()
                cmdline.run(['-c',
                             ('../bin/graph-easy --as={0} | /usr/bin/dot -Tpng -o {1}'
                              .format(renderer, outfile))],
                            capture_stdout=False, stdin=instream, shell=True)
                # Bug fix: isfile is a method on path objects; the original
                # tested the bound method itself, which is always truthy,
                # so the error branch was unreachable.
                if not outfile.isfile():
                    log.error("Failed to create image file.")
                    return
                webbrowser.open('file://%s' % outfile)
                if delete:
                    # Give the browser a moment to load the image before
                    # removing the temp directory
                    time.sleep(5)
                    shutil.rmtree(tmpdir)
                else:
                    log.info("Created graph at %s" % outfile)
            else:
                cmdline.run(['../bin/graph-easy', '--as=%s' % renderer],
                            capture_stdout=False, stdin=instream)
        return
    log.warn("Can't find graphing tool at %s" % CONFIG.graph_easy)
def test_cmdline_run_passes_stderr_if_not_captured():
    # Spawn a child interpreter whose cmdline.run leaves stderr un-captured;
    # the inner command's stderr must then surface in our pipe.
    raw_cmd = get_python_cmd('sys.stderr.write("stderr")')
    arg_list = ", ".join(["'%s'" % part for part in raw_cmd])
    proc = subprocess.Popen(
        [sys.executable,
         '-c',
         """from pkglib_util import cmdline
cmdline.run([%s], capture_stderr=False)""" % arg_list],
        stderr=PIPE)
    _, captured = proc.communicate()
    assert proc.returncode == 0
    assert 'stderr' in captured.decode('utf-8')
def run_cleanup_in_subprocess(self):
    """ Runs the cleanup job in a subprocess. Necessary as setup.py is often
        changing the state of the virtualenv as it goes, and working_set in
        memory might not reflect the real state of the world
    """
    # Using the entry point here instead of the module. This is because the
    # module may have been moved by the time we go to run it, eg if we just
    # updated pkglib itself.
    cmd = [sys.executable, os.path.join(sys.exec_prefix, "bin", "pycleanup")]
    # Forward the relevant verbosity / dry-run flags from our own argv
    cmd += [arg for arg in ("-v", "-n", "--verbose", "--dry-run")
            if arg in sys.argv]
    cmdline.run(cmd, capture_stdout=False)
def run_cleanup_in_subprocess(self):
    """ Runs the cleanup job in a subprocess. Necessary as setup.py is often
        changing the state of the virtualenv as it goes, and working_set in
        memory might not reflect the real state of the world
    """
    # Using the entry point here instead of the module. This is because the
    # module may have been moved by the time we go to run it, eg if we just
    # updated pkglib itself.
    entry_point = os.path.join(sys.exec_prefix, 'bin', 'pycleanup')
    cmd = [sys.executable, entry_point]
    for flag in ('-v', '-n', '--verbose', '--dry-run'):
        # Propagate verbosity / dry-run flags from our own invocation
        if flag in sys.argv:
            cmd.append(flag)
    cmdline.run(cmd, capture_stdout=False)
def __init__(self, executable):
    """Probe a Python installation by running an info-dump script in it."""
    self.log.info("Reading Python installation details from: %s" % executable)
    # Ship the source of _python_info_dump into the target interpreter and
    # invoke it there, so we learn about *that* installation, not ours.
    script = inspect.getsource(_python_info_dump) + '\n_python_info_dump()'
    env = clean_env_of_python_vars(remove_ld_library_path=True)
    fields = cmdline.run([executable, "-c", script], env=env).split("\n")
    debug_suffix = ".debug" if fields[3] == "1" else ""
    self.version = LooseVersion(fields[0] + debug_suffix)
    self.prefix = os.path.realpath(fields[1])
    self.real_prefix = os.path.realpath(fields[2])
    self.hexversion = fields[4]
    self.subversion = fields[5]
    self.platform = fields[6]
    self.libdir = fields[7]
    self.executable = executable
    self.bindir = os.path.dirname(executable)
    # Prefer the interpreter under real_prefix when it exists and is runnable
    candidate = os.path.join(self.real_prefix, "bin", "python")
    if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
        self.real_executable = candidate
    else:
        self.real_executable = executable
def get_open_files(self):
    """ Returns open files under our site-packages

        Returns
        -------
        `list` of `list`
            whitespace-split lsof/awk output lines; with the fixed awk
            program each entry is [pid, filename]
    """
    # It's far cheaper to run lsof for all files and search later than
    # running it with the +D option to only return results under a certain
    # directory
    # TODO: this might not be on the path, and be hidden by the >/dev/null
    # Bug fix: the separator was a *Python-level* adjacent string literal,
    # so awk saw `print $2  $9` and concatenated pid and filename with no
    # delimiter, making the split() below useless. The quotes must be part
    # of the awk program itself.
    cmd = ("lsof 2>/dev/null | grep {0} |"
           "awk '{{ print $2 \" \" $9 }}'").format(self.site_packages)
    return [i.split() for i in cmdline.run(cmd, capture_stdout=True,
                                           check_rc=False,
                                           shell=True).split("\n") if i]
def get_open_files(self):
    """ Returns open files under our site-packages

        Returns
        -------
        `list` of `list`
            whitespace-split lsof/awk output lines; with the fixed awk
            program each entry is [pid, filename]
    """
    # It's far cheaper to run lsof for all files and search later than
    # running it with the +D option to only return results under a certain
    # directory
    # TODO: this might not be on the path, and be hidden by the >/dev/null
    # Bug fix: the separator was a *Python-level* adjacent string literal,
    # so awk saw `print $2  $9` and concatenated pid and filename with no
    # delimiter, making the split() below useless. The quotes must be part
    # of the awk program itself.
    cmd = ("lsof 2>/dev/null | grep {0} |"
           "awk '{{ print $2 \" \" $9 }}'").format(self.site_packages)
    return [i.split() for i in cmdline.run(cmd, capture_stdout=True,
                                           check_rc=False,
                                           shell=True).split('\n') if i]
def create_virtualenv(dest, virtualenv_cmd=None, virtualenv_args='', python=None, verbose=False):
    """ Creates new Python virtual environment.

        Parameters
        ----------
        dest : `str`
            Destination directory path
        virtualenv_cmd : `str`
            a path to virtualenv script which will be used to create the
            virtual environment. If not provided an attempt will be made to
            get the path for virtualenv script from
            CONFIG.virtualenv_executable, or failing that the following
            environment variables:
            * "VIRTUALENV_CMD"
            * "VIRTUALENVWRAPPER_VIRTUALENV"
            A RuntimeError is raised if virtualenv command cannot be
            resolved.
        virtualenv_args : `str`
            arguments to pass to virtualenv script
        python : `str`
            a path to Python executable which virtualenv will be using
            (defaults to current Python executable). If path is not found a
            RuntimeError is raised.
        verbose : `bool`
            whether to show command output (defaults to False)

        Returns
        -------
        `str`
            real path of the created environment's Python executable
    """
    virtualenv_cmd = resolve_virtualenv_cmd(virtualenv_cmd)
    python = resolve_python(python)
    base = os.path.dirname(dest)
    if not os.path.isdir(base):
        os.makedirs(base)
    get_log().info("Creating new Python virtual environment at: %s" % dest)
    cmd = [virtualenv_cmd]
    # Bug fix: with the default virtualenv_args='' the original always
    # passed an empty string as a positional argument to virtualenv.
    if virtualenv_args:
        cmd.append(virtualenv_args)
    cmd += ["-p", python, dest]
    out = cmdline.run(cmd, capture_stdout=True,
                      env=clean_env_of_python_vars(remove_ld_library_path=True))
    if verbose:
        print(out)
    # virtualenv reports where it put the interpreter; fall back to the
    # conventional location if the message format changes.
    match = re.match(r"New +python +executable +in +([^\n ]*)", out)
    executable = match.group(1) if match else os.path.join(dest, "bin", "python")
    if not os.path.isfile(executable) or not os.access(executable, os.X_OK):
        raise RuntimeError("Unable to verify the location of created "
                           "Python executable at: %s" % dest)
    return os.path.realpath(executable)
def create_virtualenv(dest, virtualenv_cmd=None):
    """ Create Python Virtualenv for deployment. Unsets ``PYTHONPATH`` to
        ensure it is a clean build (I'm looking at you, Eclipse..)

        Parameters
        ----------
        dest : `str`
            Destination directory path
        virtualenv_cmd : `str`
            Path to the virtualenv executable; defaults to
            CONFIG.virtualenv_executable
    """
    # Idiom fix: compare to None with 'is', not '=='
    if virtualenv_cmd is None:
        virtualenv_cmd = CONFIG.virtualenv_executable
    # print-as-function with a single argument is valid on Python 2 and 3
    print("Creating virtualenv at %s" % dest)
    base = os.path.dirname(dest)
    if not os.path.isdir(base):
        os.makedirs(base)
    env = dict(os.environ)
    # Drop PYTHONPATH so the new environment isn't polluted by the caller's
    env.pop('PYTHONPATH', None)
    cmdline.run([virtualenv_cmd, dest, '--distribute'], env=env)
def unpack_eggs(self, files, target_host, target_root):
    """ Unpacks all eggs on the target host and root

        Parameters
        ----------
        files : `list`
            local egg file paths; mapped to target paths via
            ``get_file_target``
        target_host : `str`
            host to run the unpack on (via ssh)
        target_root : `str`
            filesystem root on the target host
    """
    # Consistency fix: use print() like the rest of this method instead of
    # the Python-2-only print statement.
    print("Unpacking eggs: %r" % files)
    target_eggs = [os.path.join(target_root, self.get_file_target(f))
                   for f in files]
    # Each egg is moved aside, re-created as a directory and unzipped into
    # place, then locked down read-only.
    cmd = """set -x
for EGG in %s; do
  if [ -f $EGG ]; then
    echo Unzipping $EGG
    ZIPFILE=./.tmp.`basename $EGG`
    mv $EGG $ZIPFILE && \
    mkdir $EGG && \
    unzip -q $ZIPFILE -d $EGG && \
    rm $ZIPFILE && \
    chmod -R 555 $EGG
  fi
done""" % ' '.join(target_eggs)
    print("Running cmd on %s" % target_host)
    print(cmd)
    run(['/usr/bin/ssh', target_host, cmd])
def setup(): """ Mirror pkglib's setup() method for each sub-package in this repository. """ top_level_parser = parse.get_pkg_cfg_parser() cfg = parse.parse_section(top_level_parser, 'multipkg', ['pkg_dirs']) rc = [0] for dirname in cfg['pkg_dirs']: with cmdline.chdir(dirname): # Update sub-package setup.cfg with top-level version if it's specified if 'version' in cfg: sub_parser = parse.get_pkg_cfg_parser() sub_cfg = parse.parse_pkg_metadata(sub_parser) if sub_cfg['version'] != cfg['version']: print("Updating setup.cfg version for {0}: {1} -> {2}". format(dirname, sub_cfg['version'], cfg['version'])) sub_parser.set('metadata', 'version', cfg['version']) with open('setup.cfg', 'w') as sub_cfg_file: sub_parser.write(sub_cfg_file) cmd = [sys.executable, "setup.py"] + sys.argv[1:] print("In directory {0}: Running '{1}'".format( dirname, ' '.join(cmd))) try: cmdline.run(cmd, capture_stdout=False, bufsize=0) except subprocess.CalledProcessError as e: # Here we exit straight away, unless this was a run as # 'python setup.py test'. Reason for this is that we want to # run all the packages' tests through and gather the results. # Exception: using the -x/--exitfirst option. # For any other setup.py command, a failure here is likely # some sort of build or config issue and it's best not to # plow on. print "Command failed with exit code {0}".format(e.returncode) if 'test' in cmd and not '-x' in ' '.join(cmd) \ and not '--exitfirst' in ' '.join(cmd): rc[0] = e.returncode else: sys.exit(e.returncode) sys.exit(rc[0])
def setup(): """ Mirror pkglib's setup() method for each sub-package in this repository. """ top_level_parser = parse.get_pkg_cfg_parser() cfg = parse.parse_section(top_level_parser, 'multipkg', ['pkg_dirs']) rc = [0] for dirname in cfg['pkg_dirs']: with cmdline.chdir(dirname): # Update sub-package setup.cfg with top-level version if it's specified if 'version' in cfg: sub_parser = parse.get_pkg_cfg_parser() sub_cfg = parse.parse_pkg_metadata(sub_parser) if sub_cfg['version'] != cfg['version']: print ("Updating setup.cfg version for {0}: {1} -> {2}" .format(dirname, sub_cfg['version'], cfg['version'])) sub_parser.set('metadata', 'version', cfg['version']) with open('setup.cfg', 'w') as sub_cfg_file: sub_parser.write(sub_cfg_file) cmd = [sys.executable, "setup.py"] + sys.argv[1:] print ("In directory {0}: Running '{1}'" .format(dirname, ' '.join(cmd))) try: cmdline.run(cmd, capture_stdout=False, bufsize=0) except subprocess.CalledProcessError as e: # Here we exit straight away, unless this was a run as # 'python setup.py test'. Reason for this is that we want to # run all the packages' tests through and gather the results. # Exception: using the -x/--exitfirst option. # For any other setup.py command, a failure here is likely # some sort of build or config issue and it's best not to # plow on. print "Command failed with exit code {0}".format(e.returncode) if 'test' in cmd and not '-x' in ' '.join(cmd) \ and not '--exitfirst' in ' '.join(cmd): rc[0] = e.returncode else: sys.exit(e.returncode) sys.exit(rc[0])
def unpack_eggs(self, files, target_host, target_root):
    """ Unpacks all eggs on the target host and root

        Parameters
        ----------
        files : `list`
            local egg file paths; mapped to target paths via
            ``get_file_target``
        target_host : `str`
            host to run the unpack on (via ssh)
        target_root : `str`
            filesystem root on the target host
    """
    # Consistency fix: use print() like the rest of this method instead of
    # the Python-2-only print statement.
    print("Unpacking eggs: %r" % files)
    target_eggs = [os.path.join(target_root, self.get_file_target(f))
                   for f in files]
    # Each egg is moved aside, re-created as a directory and unzipped into
    # place, then locked down read-only.
    cmd = """set -x
for EGG in %s; do
  if [ -f $EGG ]; then
    echo Unzipping $EGG
    ZIPFILE=./.tmp.`basename $EGG`
    mv $EGG $ZIPFILE && \
    mkdir $EGG && \
    unzip -q $ZIPFILE -d $EGG && \
    rm $ZIPFILE && \
    chmod -R 555 $EGG
  fi
done""" % ' '.join(target_eggs)
    print("Running cmd on %s" % target_host)
    print(cmd)
    run(['/usr/bin/ssh', target_host, cmd])
def create_virtualenv(dest, virtualenv_cmd=None, virtualenv_args='', python=None, verbose=False):
    """ Creates new Python virtual environment.

        Parameters
        ----------
        dest : `str`
            Destination directory path
        virtualenv_cmd : `str`
            a path to virtualenv script which will be used to create the
            virtual environment. If not provided an attempt will be made to
            get the path for virtualenv script from
            CONFIG.virtualenv_executable, or failing that the following
            environment variables:
            * "VIRTUALENV_CMD"
            * "VIRTUALENVWRAPPER_VIRTUALENV"
            A RuntimeError is raised if virtualenv command cannot be
            resolved.
        virtualenv_args : `str`
            arguments to pass to virtualenv script
        python : `str`
            a path to Python executable which virtualenv will be using
            (defaults to current Python executable). If path is not found a
            RuntimeError is raised.
        verbose : `bool`
            whether to show command output (defaults to False)

        Returns
        -------
        `str`
            real path of the created environment's Python executable
    """
    virtualenv_cmd = resolve_virtualenv_cmd(virtualenv_cmd)
    python = resolve_python(python)
    base = os.path.dirname(dest)
    if not os.path.isdir(base):
        os.makedirs(base)
    get_log().info("Creating new Python virtual environment at: %s" % dest)
    cmd = [virtualenv_cmd]
    # Bug fix: with the default virtualenv_args='' the original always
    # passed an empty string as a positional argument to virtualenv.
    if virtualenv_args:
        cmd.append(virtualenv_args)
    cmd += ["-p", python, dest]
    out = cmdline.run(cmd, capture_stdout=True,
                      env=clean_env_of_python_vars(remove_ld_library_path=True))
    if verbose:
        print(out)
    # virtualenv reports where it put the interpreter; fall back to the
    # conventional location if the message format changes.
    match = re.match(r"New +python +executable +in +([^\n ]*)", out)
    executable = match.group(1) if match else os.path.join(dest, "bin", "python")
    if not os.path.isfile(executable) or not os.access(executable, os.X_OK):
        raise RuntimeError("Unable to verify the location of created "
                           "Python executable at: %s" % dest)
    return os.path.realpath(executable)
def __init__(self, executable):
    """Read installation details by probing the given Python executable."""
    self.log.info("Reading Python installation details from: %s" % executable)
    # Run the info-dump function inside the target interpreter so the
    # reported details describe that installation rather than our own.
    probe = [executable, "-c",
             inspect.getsource(_python_info_dump) + '\n_python_info_dump()']
    output = cmdline.run(
        probe, env=clean_env_of_python_vars(remove_ld_library_path=True))
    info = output.split("\n")
    self.version = LooseVersion(info[0]
                                + (".debug" if info[3] == "1" else ""))
    self.prefix = os.path.realpath(info[1])
    self.real_prefix = os.path.realpath(info[2])
    self.hexversion = info[4]
    self.subversion = info[5]
    self.platform = info[6]
    self.libdir = info[7]
    self.executable = executable
    self.bindir = os.path.dirname(self.executable)
    # Use the real_prefix interpreter when it is present and executable
    real_exe = os.path.join(self.real_prefix, "bin", "python")
    usable = os.path.isfile(real_exe) and os.access(real_exe, os.X_OK)
    self.real_executable = real_exe if usable else executable
def run(self, cmd, **kwargs):
    """Run *cmd* via cmdline.run using this object's environment."""
    env = self._make_env()
    return cmdline.run(cmd, env=env, **kwargs)
def test_cmdline_run_raises_on_failure():
    # A non-zero exit code must surface as CalledProcessError
    failing_cmd = get_python_cmd(exit_code=1)
    with raises(CalledProcessError):
        cmdline.run(failing_cmd)
def mirror_eggs(self, file_root, target_host, target_root, target_packages=None, subprocesses=10):
    """ Mirrors egg files from this PyPi instance to a target host and path.
        Used for filling out a cache that can be used by
        CONFIG.installer_search_path.

        Parameters
        ----------
        file_root : `str`
            filesystem path to the root of the file store
        target_host : `str`
            host to mirror to
        target_root : `str`
            filesystem path to mirror to on target host
        target_packages : `list` or None
            list of packages to mirror. Use None for all.
        subprocesses : `int`
            number of subprocesses to spawn when doing the mirror
    """
    pkg_dirs, target_dirs = self.get_mirror_targets(
        file_root, target_root, target_packages)
    work = []
    # Bug fix: collect every mirrored file; the original passed only the
    # *last* loop iteration's `files` to unpack_eggs.
    all_files = []
    for pkg in pkg_dirs:
        # Filter non-egg and dev packages out, as this is a site-packages
        # mirror which won't work with source packages.
        files = [
            os.path.join(pkg, f) for f in os.listdir(pkg)
            if os.path.isfile(os.path.join(pkg, f))
            and f.endswith('egg') and 'dev' not in f
        ]
        print("Found %s (%d files)" % (os.path.basename(pkg), len(files)))
        if files:
            all_files.extend(files)
            mirror_dir = self.get_mirror_dirname(os.path.basename(pkg))
            cmd = ['/usr/bin/rsync', '-av', '--ignore-existing']
            cmd.extend(os.path.abspath(i).strip() for i in files)
            cmd.append(
                os.path.join(target_host + ':' + target_root, mirror_dir))
            work.append(cmd)
    if work:
        print("Creating target root dirs")
        run([
            '/usr/bin/ssh', target_host,
            'mkdir -p ' + ' '.join(target_dirs)
        ])
        # Using multiprocessing here to multiplex the transfers
        if subprocesses > 1:
            pool = Pool(processes=subprocesses)
            pool.map(run, work)
        else:
            # Explicit loop: a bare map() is lazy on Python 3 and would
            # silently do nothing.
            for cmd in work:
                run(cmd)
        self.unpack_eggs(all_files, target_host, target_root)
    else:
        print("Nothing to do.")
def run_graph_easy(entries, renderer, outfile=None):
    """ Given the path edge entries, run the graphing tools and produce the
        output.

        Parameters
        ----------
        entries : `list`
            Path edges
        renderer : `str`
            One of 'ascii', 'boxart' or 'graphviz'
        outfile : `str`
            File to save to, only for graphviz. If None, it will delete the
            generated file.
    """
    if not CONFIG.graph_easy:
        log.warn(
            "Graph-Easy not configured, please set graph_easy variable in pkglib config"
        )
        return
    from path import path
    if renderer == 'graphviz' and not os.getenv('DISPLAY'):
        log.info("No DISPLAY set, using ascii renderer")
        renderer = 'ascii'
    instream = '\n'.join(entries)
    if os.path.isfile(CONFIG.graph_easy / 'bin' / 'graph-easy'):
        with cmdline.chdir(CONFIG.graph_easy / 'lib'):
            if renderer == 'graphviz':
                delete = False
                if not outfile:
                    tmpdir = path(tempfile.mkdtemp())
                    outfile = tmpdir / 'depgraph.png'
                    delete = True
                outfile = path(outfile)
                outfile = outfile.abspath()
                cmdline.run([
                    '-c',
                    ('../bin/graph-easy --as={0} | /usr/bin/dot -Tpng -o {1}'.
                     format(renderer, outfile))
                ],
                            capture_stdout=False,
                            stdin=instream,
                            shell=True)
                # Bug fix: isfile is a method on path objects; the original
                # tested the bound method itself, which is always truthy,
                # so the error branch was unreachable.
                if not outfile.isfile():
                    log.error("Failed to create image file.")
                    return
                webbrowser.open('file://%s' % outfile)
                if delete:
                    # Give the browser a moment to load the image before
                    # removing the temp directory
                    time.sleep(5)
                    shutil.rmtree(tmpdir)
                else:
                    log.info("Created graph at %s" % outfile)
            else:
                cmdline.run(['../bin/graph-easy', '--as=%s' % renderer],
                            capture_stdout=False,
                            stdin=instream)
        return
    log.warn("Can't find graphing tool at %s" % CONFIG.graph_easy)