def do_purge(bare=False, downloads=False, allow_global=False):
    """Executes the purge functionality.

    If ``downloads`` is True, only clears out the downloads directory.
    Otherwise uninstalls every package reported by ``pip freeze`` except
    pip/setuptools and friends.
    """
    if downloads:
        if not bare:
            click.echo(crayons.yellow('Clearing out downloads directory...'))
        shutil.rmtree(project.download_location)
        return

    freeze = delegator.run('{0} freeze'.format(which_pip(allow_global=allow_global))).out
    installed = freeze.split()

    # Remove setuptools and friends from installed, if present.
    # BUG FIX: the original deleted items from `installed` while iterating it
    # with enumerate(), which skips the element following each deletion;
    # rebuilding the list removes every match reliably.
    excluded = ('setuptools', 'pip', 'wheel', 'six', 'packaging', 'pyparsing', 'appdirs')
    installed = [package for package in installed if not package.startswith(excluded)]

    if not bare:
        click.echo('Found {0} installed package(s), purging...'.format(len(installed)))
    command = '{0} uninstall {1} -y'.format(which_pip(allow_global=allow_global), ' '.join(installed))
    c = delegator.run(command)

    if not bare:
        click.echo(crayons.blue(c.out))
        click.echo(crayons.yellow('Environment now purged and fresh!'))
def test_remove_package_from_pipfile(self): proj = pipenv.project.Project() # Create test space. delegator.run('mkdir test_remove_from_pipfile') with open('test_remove_from_pipfile/Pipfile', 'w') as f: f.write('[[source]]\nurl = \'https://pypi.python.org/simple\'\n' 'verify_ssl = true\n\n\n[packages]\n' 'requests = { extras = [\'socks\'] }\nFlask = \'*\'\n\n\n' '[dev-packages]\nclick = \'*\'\n') proj._pipfile_location = 'test_remove_from_pipfile/Pipfile' # Confirm initial state of Pipfile. p = proj.parsed_pipfile assert list(p['packages'].keys()) == ['requests', 'Flask'] assert list(p['dev-packages'].keys()) == ['click'] # Remove requests from packages and click from dev-packages. proj.remove_package_from_pipfile('requests') proj.remove_package_from_pipfile('click', dev=True) p = proj.parsed_pipfile # Cleanup test space. delegator.run('rm -fr test_remove_from_pipfile') # Confirm state of Pipfile. assert 'Flask' in p['packages'] assert len(p['packages']) == 1 assert 'dev-packages' not in p
def test_add_package_to_pipfile(self): proj = pipenv.project.Project() # Create test space. delegator.run('mkdir test_add_to_pipfile') with open('test_add_to_pipfile/Pipfile', 'w') as f: f.write('[[source]]\nurl = \'https://pypi.python.org/simple\'\n' 'verify_ssl = true\n\n\n[packages]\n' 'requests = { extras = [\'socks\'] }') proj._pipfile_location = 'test_add_to_pipfile/Pipfile' proj.add_package_to_pipfile('Flask') proj.add_package_to_pipfile('Django==1.10.1', dev=True) p = proj.parsed_pipfile # Cleanup test space. delegator.run('rm -fr test_add_to_pipfile') # Confirm Flask added to packages. assert 'Flask' in p['packages'] assert p['packages']['Flask'] == '*' # Confirm Django added to dev-packages. assert 'Django' in p['dev-packages'] assert p['dev-packages']['Django'] == '==1.10.1'
def ensure_latest_pip():
    """Updates pip to the latest version."""
    # Ensure that pip is installed.
    c = delegator.run('{0} install pip'.format(which_pip()))

    # Check if version is out of date ("however" appears in pip's
    # out-of-date warning on stderr).
    if 'however' in c.err:
        # If version is out of date, update.
        click.echo(crayons.yellow('Pip is out of date... updating to latest.'))
        c = delegator.run('{0} install pip --upgrade'.format(which_pip()), block=False)
        # BUG FIX: the command was started with block=False but its output was
        # read immediately; wait for completion before echoing.
        c.block()
        click.echo(crayons.blue(c.out))
def ensure_buildpack(self):
    """Fetch the configured buildpack: extract a tarball URL, or clone a git URL.

    Tarballs (.tgz / .tar.gz) are downloaded and extracted into
    ``self.custom_buildpack_path``; anything else is treated as a git
    repository and cloned there.
    """
    assert self.buildpack

    # Decide the fetch strategy from the URL suffix.
    untargz = self.buildpack.endswith(".tgz") or self.buildpack.endswith(".tar.gz")
    clone = not untargz

    if untargz:
        self.logger.info("Downloading buildpack...")
        r = requests.get(self.buildpack, stream=False)
        self.logger.info("Extracting buildpack...")
        b = io.BytesIO(r.content)
        t = tarfile.open(mode="r:gz", fileobj=b)
        t.extractall(path=self.custom_buildpack_path)
    elif clone:
        # BUG FIX: removed a dead `elif unzip:` branch that referenced an
        # undefined name and raised NameError for every non-tarball URL
        # (it also discarded its download, so it had no useful effect).
        cmd = f"git clone {self.buildpack} {self.custom_buildpack_path}"
        self.logger.debug(f"$ {cmd}")
        c = delegator.run(cmd)
        assert c.ok
def _raw_search(tmpdir, asin=None, keyword=None, searchindex=None):
    """Run the awspa Node CLI and return its JSON output as a dict.

    Looks up either a single ASIN, or a keyword within a search index.

    :raises RateLimitedError: when Amazon throttles the request.
    :raises SubprocessError: for any other non-zero exit of the CLI.
    :raises BadSearchResult: when the CLI output is not valid JSON.
    """
    # Append an audit line for every search performed.
    with open("/tmp/awspa-searches.log", "a") as f:
        line = "{}\tasin={!r}\tkeyword={!r}\tsearchindex={!r}\n".format(
            int(time.time()), asin, keyword, searchindex
        )
        f.write(line)
        print("AWSPA_SEARCH:", line)

    # The CLI writes its JSON result to this file.
    filename = os.path.join(tmpdir, "out.json")
    cli_path = settings.BASE_DIR / "awspa/cli.js"
    if asin:
        command = "node {} --asin={} --out={}".format(cli_path, asin, filename)
    else:
        # Keyword mode requires both a keyword and a search index.
        assert keyword
        assert searchindex
        command = 'node {} --searchindex={} --out={} "{}"'.format(
            cli_path, searchindex, filename, shlex.quote(keyword)
        )
    # print(command)
    r = delegator.run(command)
    if r.return_code:
        err_out = r.err
        # Map Amazon's throttling message to a dedicated exception.
        if "You are submitting requests too quickly" in err_out:
            raise RateLimitedError(err_out)
        raise SubprocessError(
            "Return code: {}\tError: {}".format(r.return_code, err_out)
        )
    with open(filename) as f:
        out = f.read()
    try:
        return json.loads(out)
    except json.decoder.JSONDecodeError:
        raise BadSearchResult(out)
def do_create_virtualenv(three=None, python=None):
    """Creates a virtualenv."""
    click.echo(crayons.yellow('Creating a virtualenv for this project...'), err=True)

    # The user wants the virtualenv in the project.
    if PIPENV_VENV_IN_PROJECT:
        cmd = ['virtualenv', project.virtualenv_location, '--prompt=({0})'.format(project.name)]
    else:
        # Default: use pew.
        cmd = ['pew', 'new', project.name, '-d']

    # Pass a Python version to virtualenv, if needed.
    if python:
        click.echo('{0} {1} {2}'.format(crayons.yellow('Using'), crayons.red(python), crayons.yellow('to create virtualenv...')))
    elif three is False:
        python = 'python2'
    elif three is True:
        python = 'python3'

    if python:
        cmd = cmd + ['-p', python]

    # Actually create the virtualenv.
    with spinner():
        c = delegator.run(cmd, block=False)
        # BUG FIX: the command was started with block=False but its output was
        # read immediately after the spinner exited; wait for completion while
        # the spinner is still shown.
        c.block()
    click.echo(crayons.blue(c.out), err=True)

    # Say where the virtualenv is.
    do_where(virtualenv=True, bare=False)
def ensure_docker(self):
    """Ensure the docker daemon is running and usable, optionally trusting an insecure registry.

    :raises RuntimeError: when `docker ps` (or the login) fails.
    """
    if self.allow_insecure and self.registry_specified:
        logger.debug("Configuring docker service to allow our insecure registry...")
        # Configure our registry as insecure.
        try:
            with open("/etc/docker/daemon.json", "w") as f:
                data = {"insecure-registries": [self.registry_specified]}
                json.dump(data, f)
        # This fails when running on Windows...
        except FileNotFoundError:
            pass

    # Start docker service.
    self.logger.info("Starting docker")
    c = delegator.run("service docker start")
    # assert c.ok
    # Give the daemon a moment to come up before probing it.
    time.sleep(0.3)

    try:
        # Login to Docker.
        if self.requires_login:
            self.docker(f"login -u {self.username} -p {self.password}")
        # Probe the daemon; failure here means docker is unusable.
        c = self.docker("ps")
        assert c.ok
    except AssertionError:
        raise RuntimeError("Docker is not available.")
def check(three=None, python=False):
    """Verify the environment against the Pipfile's PEP 508 `requires` markers.

    Exits non-zero when any marker value differs from the specifier.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python, validate=False)

    click.echo(crayons.yellow('Checking PEP 508 requirements...'))

    # Run the PEP 508 checker in the virtualenv; rstrip('cdo') maps a
    # compiled .pyc/.pyo path back to the .py source file.
    c = delegator.run('{0} {1}'.format(which('python'), pep508checker.__file__.rstrip('cdo')))
    results = json.loads(c.out)

    # Load the pipfile.
    p = pipfile.Pipfile.load(project.pipfile_location)

    failed = False
    # Assert each specified requirement.
    for marker, specifier in p.data['_meta']['requires'].items():
        if marker in results:
            try:
                assert results[marker] == specifier
            except AssertionError:
                failed = True
                click.echo('Specifier {0} does not match {1} ({2}).'.format(crayons.green(marker), crayons.blue(specifier), crayons.red(results[marker])))
    if failed:
        click.echo(crayons.red('Failed!'))
        sys.exit(1)
    else:
        click.echo(crayons.green('Passed!'))
def test_local_vcs_urls_work(PipenvInstance, pypi):
    """Installing a local git checkout via a file:// VCS URL should succeed."""
    with PipenvInstance(pypi=pypi, chdir=True) as p:
        clone_target = Path(p.path).joinpath("six").absolute()
        clone = delegator.run(
            "git clone "
            "https://github.com/benjaminp/six.git {0}".format(clone_target)
        )
        assert clone.return_code == 0

        install = p.pipenv("install git+{0}#egg=six".format(clone_target.as_uri()))
        assert install.return_code == 0
def load_path(python):
    """Return sys.path of the given interpreter as a list, or [] on failure."""
    from ._compat import Path
    import delegator
    import json

    interpreter = Path(python).as_posix()
    dump_cmd = '"import json, sys; print(json.dumps(sys.path));"'
    result = delegator.run('"{0}" -c {1}'.format(interpreter, dump_cmd))
    if result.return_code != 0:
        return []
    return json.loads(result.out.strip())
def docker(self, cmd, assert_ok=True, fail=True):
    """Run a `docker` subcommand; when it exits non-zero, log and optionally re-raise."""
    full_cmd = f"docker {cmd}"
    self.logger.debug(f"$ {full_cmd}")
    result = delegator.run(full_cmd)
    try:
        assert result.ok
    except AssertionError as exc:
        self.logger.debug(result.out)
        self.logger.debug(result.err)
        if fail:
            raise exc
    return result
def test_parsed_pipfile(self): proj = pipenv.project.Project() # Create test space. delegator.run('mkdir test_pipfile') with open('test_pipfile/Pipfile', 'w') as f: f.write('[[source]]\nurl = \'https://pypi.python.org/simple\'\n' 'verify_ssl = true\n\n\n[packages]\n' 'requests = { extras = [\'socks\'] }') proj._pipfile_location = 'test_pipfile/Pipfile' pfile = proj.parsed_pipfile # Cleanup test space. delegator.run('rm -fr test_pipfile') # Confirm source added correctly. assert 'source' in pfile assert pfile['source'][0]['url'] == 'https://pypi.python.org/simple' # Confirm requests is in packages as expected. assert 'packages' in pfile assert pfile['packages']['requests'] == {'extras': ['socks']}
def virtualenv_location(self):
    """Return (and cache) the virtualenv path for this project."""
    # Serve the cached answer when we already resolved it once.
    if self._virtualenv_location:
        return self._virtualenv_location

    if PIPENV_VENV_IN_PROJECT:
        # Default mode: a `.venv` directory beside the Pipfile.
        parts = self.pipfile_location.split(os.sep)[:-1] + ['.venv']
        loc = os.sep.join(parts)
    else:
        # The user wants the virtualenv managed by pew.
        loc = delegator.run('pew dir {0}'.format(self.name)).out.strip()

    self._virtualenv_location = loc
    return loc
def python_version(path_to_python):
    """Return a `major.minor.micro` string for the interpreter at the path, or None."""
    import delegator

    if not path_to_python:
        return None

    try:
        proc = delegator.run([path_to_python, "--version"], block=False)
    except Exception:
        return None
    proc.block()

    # Older Pythons print the version on stderr, newer on stdout.
    raw = proc.out.strip() or proc.err.strip()
    parsed = parse_python_version(raw)
    try:
        return u"{major}.{minor}.{micro}".format(**parsed)
    except TypeError:
        # parse_python_version returned None (unrecognized output).
        return None
def venv_resolve_deps(
    deps,
    which,
    project,
    pre=False,
    verbose=False,
    clear=False,
    allow_global=False,
    pypi_mirror=None,
):
    """Resolve dependencies by running the resolver script in a subprocess.

    :param deps: list of requirement strings to resolve.
    :param which: callable that locates executables in the environment.
    :param project: the active project (part of the signature; not used below).
    :return: the parsed JSON resolution results (a list; [] for no deps).
    :raises RuntimeError: when the resolver output has no RESULTS marker.
    """
    from .vendor import delegator
    from . import resolver
    import json

    if not deps:
        return []
    # rstrip("co") maps a compiled .pyc/.pyo path back to the .py source.
    resolver = escape_grouped_arguments(resolver.__file__.rstrip("co"))
    cmd = "{0} {1} {2} {3} {4} {5}".format(
        escape_grouped_arguments(which("python", allow_global=allow_global)),
        resolver,
        "--pre" if pre else "",
        "--verbose" if verbose else "",
        "--clear" if clear else "",
        "--system" if allow_global else "",
    )
    # The package list (and optional mirror) is handed to the resolver via
    # environment variables, restored afterwards by temp_environ().
    with temp_environ():
        os.environ["PIPENV_PACKAGES"] = "\n".join(deps)
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        c = delegator.run(cmd, block=True)
    try:
        assert c.return_code == 0
    except AssertionError:
        if verbose:
            click_echo(c.out, err=True)
            click_echo(c.err, err=True)
        else:
            # Show only the tail half of stderr to keep the output short.
            click_echo(c.err[int(len(c.err) / 2) - 1 :], err=True)
        sys.exit(c.return_code)
    if verbose:
        click_echo(c.out.split("RESULTS:")[0], err=True)
    try:
        # Everything after the RESULTS: marker is the JSON payload.
        return json.loads(c.out.split("RESULTS:")[1].strip())
    except IndexError:
        raise RuntimeError("There was a problem with locking.")
def convert_to_rst(fname):
    """Convert a notebook to reST and move the output (and its image dir) into target_dir."""
    delegator.run("jupyter-nbconvert --to rst %s" % fname)
    stem = os.path.splitext(fname)[0]

    # Relocate the generated image directory, replacing any stale copy.
    image_dir = "%s_files" % stem
    dest = os.path.join(target_dir, image_dir)
    if os.path.isdir(dest):
        shutil.rmtree(dest)
    if os.path.isdir(image_dir):
        shutil.move(image_dir, target_dir)

    # Relocate the generated reST file, replacing any stale copy.
    rst_name = '%s.rst' % stem
    dest = os.path.join(target_dir, rst_name)
    if os.path.isfile(dest):
        os.remove(dest)
    shutil.move(rst_name, target_dir)
def uninstall(package_name=False, more_packages=False, three=None, python=False,
              system=False, lock=False, dev=False, all=False):
    """Uninstall packages and (usually) remove them from the Pipfile.

    Three modes: --all (purge everything), --dev (uninstall every
    [dev-packages] entry), or an explicit list of package names.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)

    package_names = (package_name,) + more_packages
    pipfile_remove = True

    # Un-install all dependencies, if --all was provided.
    if all is True:
        if not dev:
            click.echo(crayons.yellow('Un-installing all packages from virtualenv...'))
            do_purge(allow_global=system)
            sys.exit(0)

    # Uninstall [dev-packages], if --dev was provided.
    if dev:
        if 'dev-packages' in project.parsed_pipfile:
            click.echo(crayons.yellow('Un-installing {0}...'.format(crayons.red('[dev-packages]'))))
            package_names = project.parsed_pipfile['dev-packages']
            # Dev packages stay listed in the Pipfile; only the installs go.
            pipfile_remove = False
        else:
            click.echo(crayons.yellow('No {0} to uninstall.'.format(crayons.red('[dev-packages]'))))
            sys.exit(0)

    if package_name is False and not dev:
        click.echo(crayons.red('No package provided!'))
        sys.exit(1)

    for package_name in package_names:
        click.echo('Un-installing {0}...'.format(crayons.green(package_name)))
        c = delegator.run('{0} uninstall {1} -y'.format(which_pip(allow_global=system), package_name))
        click.echo(crayons.blue(c.out))

        if pipfile_remove:
            if dev:
                click.echo('Removing {0} from Pipfile\'s {1}...'.format(crayons.green(package_name), crayons.red('[dev-packages]')))
            else:
                click.echo('Removing {0} from Pipfile\'s {1}...'.format(crayons.green(package_name), crayons.red('[packages]')))
            project.remove_package_from_pipfile(package_name, dev)

    if lock:
        do_lock()
def _clean_with_csso(cssstring):
    """Minify a CSS string by shelling out to the csso CLI.

    :raises RuntimeError: when the csso process exits non-zero.
    """
    with tempfile.TemporaryDirectory() as workdir:
        src = os.path.join(workdir, "input.css")
        dst = os.path.join(workdir, "output.css")
        with open(src, "w") as f:
            f.write(cssstring)

        result = delegator.run(
            "node {} -i {} -o {}".format(settings.CSSO_CLI_BINARY, src, dst)
        )
        if result.return_code:
            raise RuntimeError(
                "Return code: {}\tError: {}".format(result.return_code, result.err)
            )

        with open(dst) as f:
            return f.read()
def run_open(module, three=None, python=None, pypi_mirror=None): from .core import which, ensure_project # Ensure that virtualenv is available. ensure_project(three=three, python=python, validate=False, pypi_mirror=pypi_mirror) c = delegator.run( '{0} -c "import {1}; print({1}.__file__);"'.format(which("python"), module) ) try: assert c.return_code == 0 except AssertionError: echo(crayons.red("Module not found!")) sys.exit(1) if "__init__.py" in c.out: p = os.path.dirname(c.out.strip().rstrip("cdo")) else: p = c.out.strip().rstrip("cdo") echo(crayons.normal("Opening {0!r} in your EDITOR.".format(p), bold=True)) edit(filename=p) sys.exit(0)
def get_downloads_info(names_map, section):
    """Collect name/version/hash info for each downloaded package file.

    Only files whose version satisfies the Pipfile requirement for
    ``section`` are included, so top-level packages are not confused with
    their transitive dependencies.

    :param names_map: mapping of download filename -> pip requirement string.
    :param section: Pipfile section name to check versions against.
    :return: list of dicts with keys ``name``, ``version``, ``hash``.
    """
    info = []
    p = project.parsed_pipfile

    for fname in os.listdir(project.download_location):
        # Get name from filename mapping.
        name = list(convert_deps_from_pip(names_map[fname]))[0]
        # Get the version info from the filenames.
        version = parse_download_fname(fname, name)
        # Get the hash of each file.
        c = delegator.run('{0} hash {1}'.format(which_pip(), os.sep.join([project.download_location, fname])))
        # Renamed from `hash`, which shadowed the builtin of the same name.
        file_hash = c.out.split('--hash=')[1].strip()

        # Verify we're adding the correct version from Pipfile
        # and not one from a dependency.
        specified_version = p[section].get(name, '')
        if is_required_version(version, specified_version):
            info.append(dict(name=name, version=version, hash=file_hash))

    return info
def python_version(path_to_python):
    """Return a `major.minor.micro` string for the interpreter at the path, or None."""
    if not path_to_python:
        return None

    try:
        proc = delegator.run([path_to_python, '--version'], block=False)
    except Exception:
        return None

    # Older Pythons print the version on stderr, newer on stdout.
    output = proc.out.strip() or proc.err.strip()

    @parse.with_pattern(r'.*')
    def allow_empty(text):
        # Accept any trailing suffix (e.g. "rc1", "+" builds), including none.
        return text

    TEMPLATE = 'Python {}.{}.{:d}{:AllowEmpty}'
    parsed = parse.parse(TEMPLATE, output, dict(AllowEmpty=allow_empty))
    if not parsed:
        return None
    major, minor, micro, _suffix = parsed.fixed
    return u"{v[0]}.{v[1]}.{v[2]}".format(v=(major, minor, micro, _suffix))
def test_install_local_vcs_not_in_lockfile(PipenvInstance, pip_src_dir):
    """Editable local and VCS installs should both land in the Pipfile and lockfile."""
    with PipenvInstance(chdir=True) as p:
        six_path = os.path.join(p.path, "six")
        c = delegator.run(
            "git clone https://github.com/benjaminp/six.git {0}".format(six_path)
        )
        assert c.return_code == 0
        c = p.pipenv("install -e ./six")
        assert c.return_code == 0
        # Capture whatever key the editable install was recorded under.
        six_key = list(p.pipfile["packages"].keys())[0]
        c = p.pipenv(
            "install -e git+https://github.com/requests/requests.git#egg=requests"
        )
        assert c.return_code == 0
        c = p.pipenv("lock")
        assert c.return_code == 0
        assert "requests" in p.pipfile["packages"]
        assert "requests" in p.lockfile["default"]
        # This is the hash of ./six
        assert six_key in p.pipfile["packages"]
        assert six_key in p.lockfile["default"]
        # The hash isn't a hash anymore, its actually the name of the package (we now resolve this)
        assert "six" in p.pipfile["packages"]
def create_key_pair(key_path):
    """Generate a 2048-bit, passphrase-less RSA key pair at key_path, unless one already exists."""
    if os.path.isfile(key_path):
        return
    delegator.run('ssh-keygen -b 2048 -t rsa -f %s -q -N ""' % key_path)
def logs(name, ns):
    """Return the log output of a deployment in the given namespace."""
    command = "kubectl logs deployment/{} -n {}".format(name, ns)
    return delegator.run(command).out
def compile_requirements(input_filename, output_filename):
    """Pin dependencies by running pip-compile over the given requirements file."""
    # BUG FIX: the format string used placeholders {1}/{2} with a dummy ""
    # first argument; use straightforward positional placeholders instead.
    delegator.run('pip-compile --output-file {0} {1}'.format(
        output_filename, input_filename))
def test_pipenv_uninstall(self):
    """End-to-end test of `pipenv uninstall`, including Pipfile cleanup."""
    delegator.run('mkdir test_pipenv_uninstall')
    os.chdir('test_pipenv_uninstall')

    # Build the environment.
    os.environ['PIPENV_VENV_IN_PROJECT'] = '1'
    assert delegator.run('touch Pipfile').return_code == 0
    assert delegator.run('pipenv --python python').return_code == 0

    # Add entries to Pipfile.
    assert delegator.run('pipenv install Werkzeug').return_code == 0
    assert delegator.run('pipenv install pytest --dev').return_code == 0

    pipfile_output = delegator.run('cat Pipfile').out
    pipfile_list = pipfile_output.split('\n')
    assert 'werkzeug = "*"' in pipfile_list
    assert 'pytest = "*"' in pipfile_list
    assert '[packages]' in pipfile_list
    assert '[dev-packages]' in pipfile_list

    # Uninstall from dev-packages, removing TOML section.
    assert delegator.run('pipenv uninstall pytest').return_code == 0

    # Test uninstalling non-existant dependency.
    c = delegator.run('pipenv uninstall NotAPackage')
    assert c.return_code == 0
    assert 'No package NotAPackage to remove from Pipfile.' in c.out

    pipfile_output = delegator.run('cat Pipfile').out
    pipfile_list = pipfile_output.split('\n')
    assert 'Werkzeug = "*"' in pipfile_list
    assert 'pytest = "*"' not in pipfile_list
    assert '[packages]' in pipfile_list
    # assert '[dev-packages]' not in pipfile_list

    os.chdir('..')
    delegator.run('rm -fr test_pipenv_uninstall')
def pip_download(package_name):
    """Download a package (without installing) into the project's download location."""
    command = '{0} download "{1}" -d {2}'.format(
        which_pip(), package_name, project.download_location)
    return delegator.run(command)
def set_provsioning_profile(self, provision_file: str = None) -> None:
    """
    Sets the provision file to use during patching.

    When no file is given, searches Xcode's DerivedData for embedded
    mobileprovision files and picks the one expiring furthest in the future.

    :param provision_file: explicit path to a provisioning profile, or None.
    :return:
    """
    # NOTE(review): method name has a typo ('provsioning'), but it is part
    # of the public interface, so it is kept as-is.
    # have provision file? set it and be done
    if provision_file:
        self.provision_file = provision_file
        return

    click.secho('No provision file specified, searching for one...', bold=True)

    # locate a valid mobile provision on disk in: ~/Library/Developer/Xcode/DerivedData/
    possible_provisions = [
        os.path.join(dp, f) for dp, dn, fn in os.walk(
            os.path.expanduser('~/Library/Developer/Xcode/DerivedData/'))
        for f in fn if 'embedded.mobileprovision' in f
    ]
    if len(possible_provisions) <= 0:
        click.secho(
            'No provisioning files found. Please specify one or generate one by building an app.',
            fg='red')
        return

    # we have some provisioning profiles, lets find the one
    # with the most days left
    current_time = datetime.datetime.now()
    expirations = {}

    for pf in possible_provisions:
        _, decoded_location = tempfile.mkstemp('decoded_provision')

        # Decode the mobile provision using macOS's security cms tool
        delegator.run(list2cmdline([
            self.required_commands['security']['location'],
            'cms',
            '-D',
            '-i', pf,
            '-o', decoded_location
        ]), timeout=self.command_run_timeout)

        # read the expiration date from the profile
        with open(decoded_location, 'rb') as f:
            parsed_data = plistlib.load(f)
            if parsed_data['ExpirationDate'] > current_time:
                expirations[
                    pf] = parsed_data['ExpirationDate'] - current_time
                click.secho('Found provision {0} expiring {1}'.format(
                    pf, expirations[pf]), dim=True)

        # cleanup the temp path
        os.remove(decoded_location)

    # ensure that we got some valid mobileprovisions to work with
    if len(expirations) <= 0:
        click.secho(
            'Could not find a non-expired provisioning file. Please specify or generate one.',
            fg='red')
        return

    # sort the results so that the mobileprovision with the most time is at
    # the top of the list
    click.secho('Found a valid provisioning profile', fg='green', bold=True)
    self.provision_file = sorted(expirations, key=expirations.get, reverse=True)[0]
def test_pipenv_run(self):
    """`pipenv run` should execute commands inside the project virtualenv."""
    working_dir = 'test_pipenv_run'
    delegator.run('mkdir {0}'.format(working_dir))
    os.chdir(working_dir)

    # Build the environment.
    os.environ['PIPENV_VENV_IN_PROJECT'] = '1'
    delegator.run('touch Pipfile')

    # Install packages for test.
    # print(delegator.run('pipenv install pep8').err)
    assert delegator.run('pipenv install pep8').return_code == 0
    assert delegator.run('pipenv install pytest').return_code == 0

    # Run test commands.
    assert delegator.run(
        'pipenv run python -c \'print("test")\'').return_code == 0
    assert delegator.run('pipenv run pep8 --version').return_code == 0
    assert delegator.run('pipenv run pytest --version').return_code == 0

    os.chdir('..')
    delegator.run('rm -fr {0}'.format(working_dir))
def test_cowsay_run(self):
    """Smoke-test running `cowsay` (as an argv list) through delegator."""
    result = delegator.run(['cowsay'], block=True)
    print(result.out)
def test_simple_usage(monkeypatch):
    """Check that cli shows prefixed variables."""
    monkeypatch.setenv('SOM_TT_VALUE', '1')
    result = delegator.run('dump-env -p SOM_TT_')
    assert result.out == 'VALUE=1\n'
# Submit one SLURM job per (num_latent, lr, iters) hyper-parameter combination,
# throttling so no more than MAX_NUM_MY_JOBS of ours are queued at once.
for num_latent in range(17, 21):
    for lr in [0.01, 0.1, 1, 2]:
        for iters in range(100, 2600, 400):
            # Paths for stdout/stderr logs and the generated batch script.
            OFILE = "{}/{}-{}-{}-{}-{}.out".format(
                SLURM_OUT, dataset, cur_fold, num_latent, lr, iters)
            EFILE = "{}/{}-{}-{}-{}-{}.err".format(
                SLURM_OUT, dataset, cur_fold, num_latent, lr, iters)
            SLURM_SCRIPT = "{}/tf-{}-{}-{}-{}-{}.pbs".format(
                SLURM_OUT, dataset, cur_fold, num_latent, lr, iters)
            CMD = 'python baseline-stf-nested-valid.py {} {} {} {} {}'.format(
                dataset, cur_fold, num_latent, lr, iters)
            # Write the sbatch script for this combination.
            lines = []
            lines.append("#!/bin/sh\n")
            lines.append('#SBATCH --time=1-16:0:00\n')
            lines.append('#SBATCH --mem=64\n')
            #lines.append('#SBATCH -c 32\n')
            lines.append('#SBATCH --exclude=artemis[1-5]\n')
            lines.append('#SBATCH -o ' + '"' + OFILE + '"\n')
            lines.append('#SBATCH -e ' + '"' + EFILE + '"\n')
            lines.append(CMD + '\n')
            with open(SLURM_SCRIPT, 'w') as f:
                f.writelines(lines)
            command = ['sbatch', SLURM_SCRIPT]
            # Throttle: wait while our queue is full (the +2 accounts for
            # squeue's header line and the trailing newline from split).
            while len(
                    delegator.run('squeue -u %s' % username).out.split(
                        "\n")) > MAX_NUM_MY_JOBS + 2:
                time.sleep(DELAY_NUM_JOBS_EXCEEDED)
            delegator.run(command, block=False)
            print(SLURM_SCRIPT)
def publish(topic, message):
    """Publish a message to a topic on the local mosquitto broker.

    NOTE(review): `topic` and `message` are interpolated into a shell
    command unescaped; callers must not pass untrusted input.
    """
    command = 'mosquitto_pub -h "{host}" -t "{topic}" -m \'{message}\''.format(
        host='localhost', topic=topic, message=message)
    # BUG FIX: this was a Python 2 `print` statement — a syntax error under
    # Python 3, which the rest of this code base targets (it uses f-strings).
    print('Running command: {}'.format(command))
    delegator.run(command)
def tile_ESPA(ctx, srcs, dst, tilespec_name, absolute, suffix):
    """ Create VRT tiles from some Landsat ESPA products

    NOTE: very "hard-coded" currently, but WIP to be generic
    """
    tilespec = TILESPECS[tilespec_name]
    dst = Path(dst)
    srcs = [Path(src) for src in srcs]

    # All products must have the same band count; remembered from the first.
    n_expected_bands = None
    for src in srcs:
        if src.suffix != '.xml':
            # BUG FIX: the format string used {1} with a single positional
            # argument, raising IndexError instead of the intended message.
            raise click.BadParameter('Must specify ESPA XML metadata files as '
                                     'SRCS...: {0}'.format(str(src)))
        pattern = PATTERNS[src.name[0:3]]
        imgs = list(find_images(src, pattern))
        if not imgs:
            click.echo('Could not find image for "{0}"'.format(str(src)))
            continue
        if n_expected_bands is None:  # set first time
            n_expected_bands = len(imgs)
        else:
            if len(imgs) != n_expected_bands:
                click.echo('Product "{0}" has a different number of bands '
                           'compared to expectation from first result '
                           '({1} vs {2})'.format(str(src), len(imgs),
                                                 n_expected_bands))
                continue

        # OK good to go
        with rasterio.open(str(imgs[0]), 'r') as example_ds:
            bounds = example_ds.bounds
        tiles = list(tilespec.bounds_to_tiles(bounds))
        for tile in tiles:
            tile_hv = 'h{0:02d}v{1:02d}'.format(tile.horizontal, tile.vertical)
            dst_dir = dst.joinpath(tile_hv, src.stem)
            try:
                dst_dir.mkdir(parents=True)
            except OSError as oserr:
                # Tolerate pre-existing tile directories only.
                if oserr.errno != errno.EEXIST:
                    raise
            vrt = dst_dir.joinpath(src.stem + suffix)
            if not absolute:
                # Rewrite image paths relative to the VRT location.
                os.chdir(str(vrt.parent))
                imgs = [relative_to(img, vrt) for img in imgs]
            cmd_str = CMD_GDALBUILDVRT.format(
                bounds=tile.bounds,
                vrt=vrt if absolute else vrt.name,
                images=' '.join([str(img) for img in imgs]))
            cmd = delegator.run(cmd_str)
            if cmd.return_code:
                click.echo('Error writing to: {0}'.format(vrt))
                click.echo(cmd.err)
            else:
                click.echo('Wrote to: {0}'.format(vrt))
            # Copy ESPA metadata
            shutil.copy(str(src), str(dst_dir.joinpath(src.name)))
            # Copy MTL if any
            mtl = list(src.parent.glob('L*MTL.txt'))
            if mtl:
                mtl = mtl[0]
                shutil.copy(str(mtl), str(dst_dir.joinpath(mtl.name)))
    click.echo('Done')
def get_info(mirror):
    """Collect sync status/progress for a mirror from the manager API and its rsync log.

    Returns a dict of status fields; when the mirror is actively rsyncing,
    additional progress fields parsed from the log tail are included.
    """
    jobs = requests.get("http://" + config['manager_save']['url'] + "/jobs").text
    # NOTE(review): `list` shadows the builtin name; kept as-is to preserve behavior.
    list = json.loads(jobs)
    size = 'NULL'
    status = 'NULL'
    for job in list:
        if job['name'] == mirror:
            last_begin_time = job['last_started_ts']
            last_time = job['last_ended_ts']
            next_time = job['next_schedule_ts']
            pass_time = str(last_time - last_begin_time)
            if int(pass_time) <= 0:
                pass_time = '-'
            size = job['size']
            status = job['status']
    file_name = remain = speed = rate = total = chk_now = chk_remain = '-'
    if status == 'syncing' and config[mirror]['type'] == 'rsync':
        # Parse progress from the log tail; retry up to 3 times because the
        # log file may be mid-write (a KeyError/NameError triggers the retry).
        i = 0
        while i < 3:
            logs = delegator.run('tail -3 ' + path + "/logs/" + mirror + '/latest').out.split("\n")
            for log in logs:
                if log.find('B/s') >= 0:
                    # A transfer-progress line (rsync --progress output).
                    if log.find('xfr') >= 0:
                        # e.g. "(xfr#3, to-chk=5/9)" -> current, remaining, total.
                        chk = str(re.findall(r'[(](.*?)[)]', log)[0])
                        chk_now = re.findall(r'[#](.*?)[,]', chk)[0]
                        chk_remain = re.findall(r'[=](.*?)[/]', chk)[0]
                        total = chk.split("/")[1]
                    infos = log.split(" ")
                    for info in infos:
                        if info.find('%') >= 0:
                            rate = info
                        elif info.find('B/s') >= 0:
                            speed = info
                        elif info.find(':') >= 0:
                            remain = info
                        elif info.find('=') >= 0 or info.find(
                                '#') >= 0:
                            pass
                        elif info:
                            size = info
                elif log:
                    # A filename line: keep only the basename.
                    files = log.split('/')
                    file_name = files[-1]
            try:
                return {
                    'status': status,
                    'size': size,
                    'last_time': last_time,
                    'pass_time': pass_time,
                    'chk_now': chk_now,
                    'chk_remain': chk_remain,
                    'total': total,
                    'rate': rate,
                    'speed': speed,
                    'remain': remain,
                    'file_name': file_name,
                    'next_time': next_time
                }
            except:
                i += 1
    # Fallback: minimal info when not syncing or parsing kept failing.
    return {
        'status': status,
        'size': size,
        'last_time': last_time,
        'pass_time': pass_time,
        'next_time': next_time
    }
def systemd_control(action, mode):
    """Run `systemctl <action>` on the tunasync manager or worker unit and return its output.

    Returns None for any unrecognized mode.
    """
    units = {'manager': 'tunasync_manager', 'worker': 'tunasync_worker'}
    if mode in units:
        return delegator.run("systemctl " + action + " " + units[mode]).out
("EncodedAbsUrl", url), ("OriginalFilename", filename), ])) if not clean_filepath.exists(): if filename.suffix == ".pdf": copyfile(str(cache_dir / filename), str(clean_filepath)) elif (filename.suffix == ".doc") or (filename.suffix == ".docx"): command = ("libreoffice --convert-to 'pdf' " + "--nolockcheck " + "--headless " + "--outdir {} " + "'{}'").format( cache_dir, str(cache_dir / filename)) print(command) c = delegator.run(command) print(c.out) pdf_version = filename.stem + ".pdf" copyfile(str(cache_dir / pdf_version), str(clean_filepath)) elif filename.suffix == ".xps": pdf_version = filename.stem + ".pdf" command = "xpstopdf '{}' '{}'".format( str(cache_dir / filename), str(cache_dir / pdf_version) ) print(command) c = delegator.run(command) print(c.out) copyfile(os.path.join(cache_dir, pdf_version), clean_filepath) print("=>", clean_filename) print("====")
# GF_IMAGES_JOBS print(''' RUN WITH - 'LD_LIBRARY_PATH=./../../../rust/build python3 gf_simple_model.py' ''') # os.environ["LD_LIBRARY_PATH"] = "%s/../../../rust/build"%(modd_str) # assert "LD_LIBRARY_PATH" in os.environ.keys() # FIX!! - this is super temporary!! as soon as possible fix this, where the # PY extension lib file is not copied explicitly to the current directory. # see why using LD_LIBRARY_PATH doesnt work for gf_images_jobs_py.so, # but does work for libtensorflow.so import delegator gf_py_libs_path_str = "%s/../../../rust/build"%(modd_str) delegator.run("cp %s/gf_images_jobs_py.so %s"%(gf_py_libs_path_str, modd_str)) print("loading gf_images_jobs_py.so") import gf_images_jobs_py as gf_images_jobs #--------------------------- #---------------------------------------------- # LOAD_GENERATED def load__generated(p_generate_bool = False): print("load generated...") dataset_target_dir_path_str = "%s/test/data/output/generated"%(modd_str) dataset_train_file_path_str = "%s/tfrecords/test__train.tfrecords"%(dataset_target_dir_path_str)
def __init__(self):
    """Swap this instance over to the iptables backend when iptables is available.

    If a `which iptables` lookup succeeds, re-brand the instance as
    FirewallIPTables and re-run that class's initialiser.
    """
    # NOTE(review): mutating __class__ like this relies on FirewallIPTables
    # being layout-compatible with this class — confirm before refactoring.
    if delegator.run('which iptables', block=True).return_code == 0:
        self.__class__ = FirewallIPTables
        self.__init__()
def pip_install(package_name=None, r=None, allow_global=False):
    """Install a single package, or a requirements file, using the project's pip.

    :param package_name: package spec to install (ignored when `r` is given).
    :param r: path to a requirements file; hashes are enforced in this mode.
    :param allow_global: use the system pip instead of the virtualenv's.
    :return: the finished delegator command object.
    """
    pip = which_pip(allow_global=allow_global)
    index_url = project.source['url']
    if r:
        command = '{0} install -r {1} --require-hashes -i {2}'.format(pip, r, index_url)
    else:
        command = '{0} install "{1}" -i {2}'.format(pip, package_name, index_url)
    return delegator.run(command)
def test_cli_usage(self):
    """Smoke-test the basic pipenv CLI workflow: init, install, dev install, lock."""
    delegator.run('mkdir test_project')
    os.chdir('test_project')

    os.environ['PIPENV_VENV_IN_PROJECT'] = '1'

    assert delegator.run('touch Pipfile').return_code == 0
    assert delegator.run('pipenv --python python').return_code == 0
    assert delegator.run('pipenv install requests').return_code == 0
    assert delegator.run('pipenv install pytest --dev').return_code == 0
    assert delegator.run('pipenv lock').return_code == 0

    # Both the Pipfile and the lockfile should record the dev dependency.
    assert 'pytest' in delegator.run('cat Pipfile').out
    assert 'pytest' in delegator.run('cat Pipfile.lock').out

    os.chdir('..')
    delegator.run('rm -fr test_project')
def test_cli_usage(self):
    """Extended CLI workflow: installs (incl. VCS dependency), lock, and uninstall."""
    delegator.run('mkdir test_project')
    os.chdir('test_project')

    os.environ['PIPENV_VENV_IN_PROJECT'] = '1'

    assert delegator.run('touch Pipfile').return_code == 0
    assert delegator.run('pipenv --python python').return_code == 0
    assert delegator.run('pipenv install Werkzeug').return_code == 0
    assert delegator.run('pipenv install pytest --dev').return_code == 0
    # assert delegator.run('pipenv install https://pypi.python.org/packages/49/df/50aa1999ab9bde74656c2919d9c0c085fd2b3775fd3eca826012bef76d8c/requests-2.18.4-py2.py3-none-any.whl#md5=eb9be71cc41fd73a51a7c9cd1adde5de').return_code == 0

    # Debug.
    print(delegator.run('pipenv install regex').err)
    assert delegator.run(
        'pipenv install regex').return_code == 0  # failing before
    assert delegator.run(
        'pipenv install git+https://github.com/requests/[email protected]#egg=requests'
    ).return_code == 0
    assert delegator.run('pipenv lock').return_code == 0

    # Test uninstalling a package after locking.
    assert delegator.run('pipenv uninstall Werkzeug').return_code == 0

    pipfile_output = delegator.run('cat Pipfile').out
    lockfile_output = delegator.run('cat Pipfile.lock').out

    # Ensure uninstall works.
    assert 'Werkzeug' not in pipfile_output
    assert 'werkzeug' not in lockfile_output

    # Ensure dev-packages work.
    assert 'pytest' in pipfile_output
    assert 'pytest' in lockfile_output

    # Ensure vcs dependencies work.
    assert 'requests' in pipfile_output
    assert '"git": "https://github.com/requests/requests.git"' in lockfile_output

    os.chdir('..')
    delegator.run('rm -fr test_project')
def test_python_version_from_full_path(self):
    """python_version() should report the exact version of the interpreter at FULL_PYTHON_PATH."""
    reported = delegator.run('{0} --version'.format(FULL_PYTHON_PATH)).out
    print(reported)
    assert python_version(FULL_PYTHON_PATH) == "3.6.1"
def patch_android_apk(source: str, architecture: str, pause: bool, skip_cleanup: bool = True,
                      enable_debug: bool = True, gadget_version: str = None,
                      skip_resources: bool = False, network_security_config: bool = False,
                      target_class: str = None, use_aapt2: bool = False,
                      gadget_config: str = None, script_source: str = None,
                      ignore_nativelibs: bool = True, manifest: str = None) -> None:
    """
        Patches an Android APK by extracting, patching SMALI, repackaging
        and signing a new APK.

        :param source: path of the source APK to patch
        :param architecture: target device architecture; auto-detected via `adb` when empty
        :param pause: pause before rebuilding so manual fixes can be applied
        :param skip_cleanup: keep the temporary working files after patching
        :param enable_debug: flip the debug flag in the unpacked APK
        :param gadget_version: a specific Frida gadget version to use
            (latest Github release when None)
        :param skip_resources: passed through to AndroidPatcher
        :param network_security_config: add a network security config to the APK
        :param target_class: class to inject the load-library call into
        :param use_aapt2: use aapt2 when rebuilding the APK
        :param gadget_config: a gadget configuration file (required if script_source is set)
        :param script_source: a script to copy in alongside the gadget config
        :param ignore_nativelibs: when True, skip the extract-native-libs patch
        :param manifest: passed through to AndroidPatcher
        :return:
    """

    github = Github(gadget_version=gadget_version)
    android_gadget = AndroidGadget(github)

    # without an architecture set, attempt to determine one using adb
    if not architecture:
        click.secho('No architecture specified. Determining it using `adb`...', dim=True)
        o = delegator.run('adb shell getprop ro.product.cpu.abi')

        # read the ach from the process' output
        architecture = o.out.strip()

        if len(architecture) <= 0:
            click.secho(
                'Failed to determine architecture. Is the device connected and authorized?',
                fg='red', bold=True)
            return

        click.secho(
            'Detected target device architecture as: {0}'.format(architecture),
            fg='green', bold=True)

    # set the architecture we are interested in
    android_gadget.set_architecture(architecture)

    # check the gadget config flags
    if script_source and not gadget_config:
        click.secho(
            'A script source was specified but no gadget configuration was set.',
            fg='red', bold=True)
        return

    # check if a gadget version was specified. if not, get the latest one.
    if gadget_version is not None:
        github_version = gadget_version
        click.secho(
            'Using manually specified version: {0}'.format(gadget_version),
            fg='green', bold=True)
    else:
        github_version = github.get_latest_version()
        click.secho(
            'Using latest Github gadget version: {0}'.format(github_version),
            fg='green', bold=True)

    # get local version of the stored gadget
    local_version = android_gadget.get_local_version('android_' + architecture)

    # check if the local version needs updating. this can be either because
    # the version is outdated or we simply don't have the gadget yet, or, we want
    # a very specific version
    if parse_version(github_version) != parse_version(
            local_version) or not android_gadget.gadget_exists():
        # download!
        click.secho(
            'Remote FridaGadget version is v{0}, local is v{1}. Downloading...'
            .format(github_version, local_version), fg='green')

        # download, unpack, update local version and cleanup the temp files.
        android_gadget.download() \
            .unpack() \
            .set_local_version('android_' + architecture, github_version) \
            .cleanup()

    click.secho(
        'Patcher will be using Gadget version: {0}'.format(github_version),
        fg='green')

    patcher = AndroidPatcher(skip_cleanup=skip_cleanup, skip_resources=skip_resources,
                             manifest=manifest)

    # ensure that we have all of the commandline requirements
    if not patcher.are_requirements_met():
        return

    # ensure we have the latest apk-tool and run the
    if not patcher.is_apktool_ready():
        click.secho('apktool is not ready for use', fg='red', bold=True)
        return

    # work on patching the APK
    patcher.set_apk_source(source=source)
    patcher.unpack_apk()
    patcher.inject_internet_permission()

    if not ignore_nativelibs:
        patcher.extract_native_libs_patch()

    if enable_debug:
        patcher.flip_debug_flag_to_true()

    if network_security_config:
        patcher.add_network_security_config()

    patcher.inject_load_library(target_class=target_class)
    patcher.add_gadget_to_apk(architecture, android_gadget.get_frida_library_path(),
                              gadget_config)

    if script_source:
        click.secho('Copying over a custom script to use with the gadget config.',
                    fg='green')
        shutil.copyfile(
            script_source,
            os.path.join(patcher.apk_temp_directory, 'lib', architecture,
                         'libfrida-gadget.script.so'))

    # if we are required to pause, do that.
    if pause:
        click.secho(('Patching paused. The next step is to rebuild the APK. '
                     'If you require any manual fixes, the current temp '
                     'directory is:'), bold=True)
        click.secho('{0}'.format(patcher.get_temp_working_directory()),
                    fg='green', bold=True)

        input('Press ENTER to continue...')

    patcher.build_new_apk(use_aapt2=use_aapt2)
    patcher.zipalign_apk()
    patcher.sign_apk()

    # woohoo, get the APK!
    destination = source.replace('.apk', '.objection.apk')
    click.secho(
        'Copying final apk from {0} to {1} in current directory...'.format(
            patcher.get_patched_apk_path(), destination))
    shutil.copyfile(patcher.get_patched_apk_path(),
                    os.path.join(os.path.abspath('.'), destination))
# Small Flask app that serves a static UI and forces all traffic to the
# access-point address while a background `wifi-connect` process runs.
import delegator
from flask import Flask, request, jsonify, redirect

# Serve static assets from ./ui at the web root.
app = Flask(__name__, static_url_path='', static_folder='ui')


@app.route('/')
def root():
    # The UI is a single static page.
    return app.send_static_file('index.html')


@app.route('/custom', methods=['POST'])
def custom():
    # Log the submitted 'custom' form field and acknowledge receipt.
    print(request.form.get('custom'))
    return jsonify({"success": True})


@app.before_request
def before_request():
    # Redirect any request not addressed to 192.168.42.1 back to that host
    # (presumably the device's own AP address — TODO confirm).
    if request.host != '192.168.42.1':
        return redirect('http://192.168.42.1/')


if __name__ == "__main__":
    # Start wifi-connect in the background (block=False), then serve on :80.
    delegator.run(['wifi-connect', '--portal-listening-port', '45454'], block=False)
    app.run(host='0.0.0.0', port=80)
def run(program_text):
    """Run *program_text* through `zion run` and return the captured stdout."""
    with program_context(program_text) as name:
        process = delegator.run('zion run {}'.format(name))
        return process.out
def exec(name, ns, cmd):
    """Run *cmd* inside deployment *name* in namespace *ns*; return stdout.

    NOTE: intentionally shadows the builtin `exec`; callers use this name.
    """
    kubectl_command = "kubectl exec deploy/{name} -n {ns} -- {cmd}".format(
        name=name, ns=ns, cmd=cmd)
    return delegator.run(kubectl_command).out
def uninstall(
    package_name=False, more_packages=False, three=None, python=False,
    system=False, lock=False, dev=False, all=False
):
    """Un-installs packages and removes them from the Pipfile.

    NOTE(review): `all` shadows the builtin; kept for CLI compatibility.
    Exits the process after --all / --dev / missing-package handling.
    """

    # Automatically use an activated virtualenv.
    if PIPENV_USE_SYSTEM:
        system = True

    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)

    package_names = (package_name,) + more_packages
    # Whether uninstalled packages should also be dropped from the Pipfile.
    pipfile_remove = True

    # Un-install all dependencies, if --all was provided.
    if all is True:
        click.echo(
            crayons.yellow('Un-installing all packages from virtualenv...')
        )
        do_purge(allow_global=system)
        sys.exit(0)

    # Uninstall [dev-packages], if --dev was provided.
    if dev:
        if 'dev-packages' in project.parsed_pipfile:
            click.echo(
                crayons.yellow('Un-installing {0}...'.format(
                    crayons.red('[dev-packages]'))
                )
            )
            # Uninstall every dev package, but keep the Pipfile section intact.
            package_names = project.parsed_pipfile['dev-packages']
            pipfile_remove = False
        else:
            click.echo(crayons.yellow('No {0} to uninstall.'.format(
                crayons.red('[dev-packages]')))
            )
            sys.exit(0)

    if package_name is False and not dev:
        click.echo(crayons.red('No package provided!'))
        sys.exit(1)

    for package_name in package_names:

        click.echo('Un-installing {0}...'.format(
            crayons.green(package_name))
        )

        # Quote the pip path in case it contains spaces.
        c = delegator.run('"{0}" uninstall {1} -y'.format(
            which_pip(allow_global=system), package_name
        ))
        click.echo(crayons.blue(c.out))

        if pipfile_remove:
            # Check both sections under the normalized (PEP 423) name.
            norm_name = pep423_name(package_name)
            in_dev_packages = (norm_name in project._pipfile.get('dev-packages', {}))
            in_packages = (norm_name in project._pipfile.get('packages', {}))

            if in_dev_packages or in_packages:
                click.echo(
                    'Removing {0} from Pipfile...'.format(
                        crayons.green(package_name)
                    )
                )
            else:
                click.echo(
                    'No package {0} to remove from Pipfile.'.format(
                        crayons.green(package_name)
                    )
                )
                continue

            # Remove package from both packages and dev-packages.
            project.remove_package_from_pipfile(package_name, dev=True)
            project.remove_package_from_pipfile(package_name, dev=False)

    if lock:
        do_lock()
def add_keys_to(instance_tag, key_path):
    """Attach the SSH keys file at *key_path* to a GCE instance as metadata."""
    # TODO This method could be optional
    command_template = ("gcloud compute instances add-metadata %s "
                        "--metadata-from-file sshKeys=%s")
    delegator.run(command_template % (instance_tag, key_path))
# Compile the gf_3d TypeScript sources into a single JS bundle with tsc.
import delegator

# Destination of the compiled bundle.
output_file_str = './bin/gf_3d.js'

# TypeScript inputs: the test entrypoint plus the library source.
files_lst = [
    'gf_3d_test.ts',
    './../../ts/gf_3d.ts',
]
print('files_lst - %s'%(files_lst))

print('RUNNING COMPILE...')
# --out bundles all inputs into one file using the SystemJS module format.
r = delegator.run(f"tsc --module system --out {output_file_str} {' '.join(files_lst)}")
print(r.out)
print(r.err)
def _checkout_branch(repo_path, branch='master'):
    """Start a non-blocking `git checkout` of *branch* inside *repo_path*."""
    checkout_cmd = 'git -C {0} checkout {1}'.format(repo_path, branch)
    # block=False: return the running command immediately.
    return delegator.run(checkout_cmd, block=False)
def patch_and_codesign_binary(self, frida_gadget: str, codesign_signature: str) -> None:
    """
        Patches an iOS binary to load a Frida gadget on startup.

        Any other dylibs within the application will also be code signed with
        the same signature used for the FridaGadget itself.

        :param frida_gadget: path to the FridaGadget.dylib to embed
        :param codesign_signature: codesigning identity used for every dylib
        :return:
    """

    # Both the binary and the extracted app folder must be known first.
    if not self.app_binary:
        raise Exception('The applications binary should be set first.')

    if not self.app_folder:
        raise Exception('The application should be extracted first.')

    # create a Frameworks directory if it does not already exist
    if not os.path.exists(os.path.join(self.app_folder, 'Frameworks')):
        click.secho('Creating Frameworks directory for FridaGadget...', fg='green')
        os.mkdir(os.path.join(self.app_folder, 'Frameworks'))

    # copy the frida gadget to the applications Frameworks directory
    shutil.copyfile(
        frida_gadget,
        os.path.join(self.app_folder, 'Frameworks', 'FridaGadget.dylib'))

    # patch the app binary: insert_dylib adds an LC_LOAD_DYLIB entry pointing
    # at the gadget, stripping the existing code signature in place.
    load_library_output = delegator.run(list2cmdline([
        self.required_commands['insert_dylib']['location'],
        '--strip-codesig',
        '--inplace',
        '@executable_path/Frameworks/FridaGadget.dylib',
        self.app_binary
    ]), timeout=self.command_run_timeout)

    # check if the insert_dylib call may have failed
    if 'Added LC_LOAD_DYLIB' not in load_library_output.out:
        click.secho(
            'Injecting the load library to {0} might have failed.'.format(
                self.app_binary), fg='yellow')
        click.secho(load_library_output.out, fg='red', dim=True)
        click.secho(load_library_output.err, fg='red')

    # get the paths of all of the .dylib files in this applications
    # bundle. we will have to codesign all of them and not just the
    # frida gadget
    dylibs_to_sign = [
        os.path.join(dp, f) for dp, dn, fn in os.walk(self.app_folder)
        for f in fn if f.endswith('.dylib')
    ]

    # codesign the dylibs in this bundle
    click.secho('Codesigning {0} .dylib\'s with signature {1}'.format(
        len(dylibs_to_sign), codesign_signature), fg='green')

    for dylib in dylibs_to_sign:
        click.secho('Code signing: {0}'.format(os.path.basename(dylib)), dim=True)
        # -f replaces any existing signature; -s selects the identity.
        delegator.run(
            list2cmdline([
                self.required_commands['codesign']['location'],
                '-f',
                '-v',
                '-s',
                codesign_signature,
                dylib
            ]))
def test_delegator_dir_run(self):
    """Run `dir` without blocking and dump whatever output was captured."""
    process = delegator.run(['dir'], block=False)
    print(process.out)
def __init__(self):
    """Load the simulator inventory from `xcrun simctl list -j` and cache it."""
    listing = delegator.run('xcrun simctl list -j')
    # self.all: full parsed JSON; devices/runtimes are convenience views.
    self.all = json.loads(listing.out)
    self.devices = self.all['devices']
    self.runtime = self.all['runtimes']
def run_cmd(self, *args):
    """Append *args* (stringified) to self.cmd_list and run the command."""
    full_command = " ".join(self.cmd_list + [str(extra) for extra in args])
    return delegator.run(full_command)
def test_ls_run(self):
    """Run `ls -lrt` without blocking and print the captured output."""
    process = delegator.run(['ls', '-lrt'], block=False)
    print(process.out)