def determine_delta_emissivity(
        outname,
        delta_emissivity_output,
        landcover_map,
        delta_lse_expression,
        info=False,
):
    """
    Derive a delta land surface emissivity map for the region of interest
    from the FROM-GLC land cover map via a look-up-table expression.
    """
    message = ('\n|i Determining delta land surface emissivity based on a '
               'look-up table ')
    if info:
        message += f'\n Expression:\n\n {delta_lse_expression}'
    g.message(message)

    expression = replace_dummies(
        delta_lse_expression,
        instring=DUMMY_MAPCALC_STRING_FROM_GLC,
        outstring=landcover_map,
    )
    equation = EQUATION.format(result=outname, expression=expression)
    grass.mapcalc(equation, overwrite=True)

    if info:
        run('r.info', map=outname, flags='r')

    # save delta land surface emissivity map?
    if delta_emissivity_output:
        run('g.rename', raster=(outname, delta_emissivity_output))
def test_kill_all_should_kill_all_nodes_and_remove_state_file():
    """'killnodes all' must stop every mongod and delete the state file."""
    run('create')
    assert_mongods_running(3)
    run('killnodes all')
    # all nodes down and no residual state left behind
    assert_mongods_running(0)
    assert_no_state_file()
def digital_numbers_to_radiance(
        outname,
        band,
        radiance_expression,
        null=False,
        quiet=False,
):
    """
    Convert Digital Number values to TOA Radiance. For details, see in
    Landsat8 class.  Zero (0) DNs set to NULL here (not via the class'
    function).
    """
    if null:
        g.message(f"\n|i Setting zero (0) Digital Numbers in {band} to NULL")
        run('r.null', map=band, setnull=0)

    message = f"\n|i Rescaling {band} digital numbers to spectral radiance "
    if not quiet:
        message += '| Expression: ' + radiance_expression
    g.message(message)

    expression = replace_dummies(radiance_expression,
                                 instring=DUMMY_MAPCALC_STRING_DN,
                                 outstring=band)
    equation = EQUATION.format(result=outname, expression=expression)
    grass.mapcalc(equation, overwrite=True)

    if not quiet:
        run('r.info', map=outname, flags='r')
def test_toy_geometric():
    """Geometric-mode MeshTetra on the toy mesh, plus circumcenter checks."""
    filename = download_mesh(
        'toy.msh', '1d125d3fa9f373823edd91ebae5f7a81'
    )
    raw_mesh, _, _, _ = voropy.read(filename)
    mesh = voropy.mesh_tetra.MeshTetra(
        raw_mesh.node_coords, raw_mesh.cells['nodes'], mode='geometric'
    )
    run(
        mesh,
        volume=9.3875504672601107,
        convol_norms=[0.20175742659663737, 0.0093164692200450819],
        ce_ratio_norms=[13.497977312281323, 0.42980191511570004],
        cellvol_norms=[0.091903119589148916, 0.0019959463063558944],
        tol=1.0e-6
    )
    circumcenters = mesh.get_cell_circumcenters()
    # accurate sum and max over all circumcenter coordinates
    assert abs(fsum(circumcenters.flat) - 1103.7038287583791) < 1.0e-12
    assert abs(max(circumcenters.flat) - 3.4234008596539662) < 1.0e-12
def setup_hugepages(kind: StorageKind) -> None:
    """
    Remount /dev/hugepages and size the kernel hugepage pool.

    For non-SPDK storage the pool is emptied; for SPDK it is sized to use
    all memory except a 25 GB reserve for the system.

    Raises RuntimeError when less than 1 GB would remain for SPDK.
    """
    # remount to free up space; retry while the mountpoint is busy
    while os.path.ismount("/dev/hugepages"):
        try:
            run(["sudo", "umount", "/dev/hugepages"])
            break
        except subprocess.CalledProcessError:
            # log message previously reported the wrong path (MOUNTPOINT)
            print("unmount /dev/hugepages failed; retry in 1s")
            time.sleep(1)
    run(["sudo", "mount", "-t", "hugetlbfs", "hugetlbfs", "/dev/hugepages"])
    set_hugepages(0)
    if kind != StorageKind.SPDK:
        return
    total_memory = get_total_memory()
    # leave 25 GB for the system (comment previously said 5 GB)
    gigabyte = 1024 * 1024 * 1024
    spdk_memory = total_memory - 25 * gigabyte
    if spdk_memory < gigabyte:
        raise RuntimeError("Get more memory dude!")
    # pool size expressed in 2048 kB hugepages
    num = int(spdk_memory / 2048 / 1024)
    set_hugepages(num)
def determine_average_emissivity(
        outname,
        emissivity_output,
        landcover_map,
        avg_lse_expression,
        quiet=True,
):
    """
    Derive an average land surface emissivity map for the region of
    interest from the FROM-GLC land cover map via a look-up table.
    """
    message = (
        '\n|i Determining average land surface emissivity based on a look-up table '
    )
    if not quiet:
        message += ('| Expression:\n\n {exp}').format(exp=avg_lse_expression)
    g.message(message)

    expression = replace_dummies(
        avg_lse_expression,
        instring=DUMMY_MAPCALC_STRING_FROM_GLC,
        outstring=landcover_map,
    )
    equation = EQUATION.format(result=outname, expression=expression)
    grass.mapcalc(equation, overwrite=True)

    if not quiet:
        run('r.info', map=outname, flags='r')

    # save land surface emissivity map?
    if emissivity_output:
        run('g.rename', raster=(outname, emissivity_output))
def setup_luks(plain_dev: str, luks_name: str, key: str) -> str:
    """Format `plain_dev` as a LUKS2 volume keyed by `key`, open the
    mapping under `luks_name`, and return the mapper device path."""
    format_command = [
        "sudo", "cryptsetup", "-v",
        "--type", "luks2",
        "luksFormat", plain_dev,
        "--batch-mode",
        # "aes-xts-plain64",
        "--cipher", "capi:xts(aes)-plain64",
        "--key-size", "256",
        "--hash", "sha256",
        # default is argon2i, which requires 1GB of RAM
        "--pbkdf", "pbkdf2",
    ]
    run(format_command, input=key)
    cryptsetup_luks_open(plain_dev, luks_name, key)
    return f"/dev/mapper/{luks_name}"
def extract_app_files(url, timestamp):
    """
    Extract the app zip file into an install directory (specified in
    config); return the install directory path.
    """
    install_path = path.join(install_parent, timestamp)

    # Skip extraction when the install dir already exists and is non-empty
    already_populated = path.exists(install_path) and listdir(install_path)
    if not already_populated:
        tempfile_path = '/tmp/wsgi-app-package.tgz'
        create_dir(install_path)
        log(
            "Extracting '{url}' to '{dir}'".format(url=url, dir=install_path)
        )
        run(sh.rm, tempfile_path, f=True)
        urlretrieve(url, tempfile_path)
        # Extract files into install dir, stripping the archive's top level
        run(
            sh.tar,
            file=tempfile_path,
            directory=install_path,
            strip="1",
            z=True,
            x=True
        )
    return install_path
def radiance_to_brightness_temperature(
        outname,
        radiance,
        temperature_expression,
        quiet=False,
):
    """
    Convert Spectral Radiance to At-Satellite Brightness Temperature.
    For details see Landsat8 class.
    """
    expression = replace_dummies(
        temperature_expression,
        instring=DUMMY_MAPCALC_STRING_RADIANCE,
        outstring=radiance,
    )

    message = "\n|i Converting spectral radiance to at-Satellite Temperature "
    if not quiet:
        message += "| Expression: " + str(expression)
    g.message(message)

    equation = EQUATION.format(result=outname, expression=expression)
    grass.mapcalc(equation, overwrite=True)

    if not quiet:
        run('r.info', map=outname, flags='r')
def test_arrow3d():
    """Four tetrahedra around an arrow shape; two Delaunay violations."""
    points = numpy.array([
        [0.0, 0.0, 0.0],
        [2.0, -1.0, 0.0],
        [2.0, 0.0, 0.0],
        [2.0, 1.0, 0.0],
        [0.5, 0.0, -0.9],
        [0.5, 0.0, 0.9],
    ])
    cells = numpy.array([
        [1, 2, 4, 5],
        [2, 3, 4, 5],
        [0, 1, 4, 5],
        [0, 3, 4, 5],
    ])
    mesh = voropy.mesh_tetra.MeshTetra(points, cells)
    run(
        mesh,
        1.2,
        [0.58276428453480922, 0.459],
        [0.40826901831985885, 0.2295],
        [numpy.sqrt(0.45), 0.45],
    )
    assert mesh.num_delaunay_violations() == 2
def determine_delta_emissivity(
        outname,
        delta_emissivity_output,
        landcover_map,
        delta_lse_expression,
        quiet=True,
):
    """
    Produce a delta emissivity map based on the FROM-GLC map covering the
    region of interest.
    """
    message = ("\n|i Determining delta land surface emissivity based on a "
               "look-up table ")
    if not quiet:
        message += "| Expression:\n\n {exp}".format(exp=delta_lse_expression)
    g.message(message)

    expression = replace_dummies(
        delta_lse_expression,
        instring=DUMMY_MAPCALC_STRING_FROM_GLC,
        outstring=landcover_map,
    )
    equation = EQUATION.format(result=outname, expression=expression)
    grass.mapcalc(equation, overwrite=True)

    if not quiet:
        run("r.info", map=outname, flags="r")

    # save delta land surface emissivity map?
    if delta_emissivity_output:
        run("g.rename", raster=(outname, delta_emissivity_output))
def test_cubesmall():
    """Five-tet decomposition of a 1 x 1 x 10 box: control/cell volumes."""
    points = numpy.array([
        [-0.5, -0.5, -5.0],
        [-0.5, +0.5, -5.0],
        [+0.5, -0.5, -5.0],
        [-0.5, -0.5, +5.0],
        [+0.5, +0.5, -5.0],
        [+0.5, +0.5, +5.0],
        [-0.5, +0.5, +5.0],
        [+0.5, -0.5, +5.0],
    ])
    cells = numpy.array([
        [0, 1, 2, 3],
        [1, 2, 4, 5],
        [1, 2, 3, 5],
        [1, 3, 5, 6],
        [2, 3, 5, 7],
    ])
    mesh = meshplex.MeshTetra(points, cells)
    tol = 1.0e-14
    cv = numpy.ones(8) * 1.25
    cellvols = [5.0 / 3.0, 5.0 / 3.0, 10.0 / 3.0, 5.0 / 3.0, 5.0 / 3.0]
    assert near_equal(mesh.control_volumes, cv, tol)
    assert near_equal(mesh.cell_volumes, cellvols, tol)
    # numpy.Inf was removed in NumPy 2.0; numpy.inf is the canonical name
    cv_norms = [
        numpy.linalg.norm(cv, ord=2),
        numpy.linalg.norm(cv, ord=numpy.inf),
    ]
    cellvol_norms = [
        numpy.linalg.norm(cellvols, ord=2),
        numpy.linalg.norm(cellvols, ord=numpy.inf),
    ]
    run(mesh, 10.0, cv_norms, [28.095851618771825, 1.25], cellvol_norms)
def set_hugepages(num: int) -> None:
    """Resize node0's 2 MB hugepage pool to `num` pages via sysfs."""
    sysfs_path = ("/sys/devices/system/node/node0/hugepages/"
                  "hugepages-2048kB/nr_hugepages")
    # $0 expands to str(num) inside the root shell, so the redirect
    # happens with sudo privileges
    run(["sudo", "sh", "-c", "echo $0 > " + sysfs_path, str(num)])
def test_should_start_mongods_and_save_state():
    """'create' must start three mongods and persist a state file."""
    # precondition: clean slate — no state file, no running mongods
    assert not os.path.isfile(state.filename)
    assert_mongods_running(0)
    run('create')
    # postcondition: state recorded and all three nodes up
    assert os.path.isfile(state.filename)
    assert_mongods_running(3)
def umount(self) -> None:
    """Unmount MOUNTPOINT, retrying up to three times on failure.

    The original placed ``break`` at loop level, so the loop always ended
    after the first attempt and the retry never happened; breaking only on
    success restores the intended retry behavior.
    """
    for _ in range(3):
        try:
            run(["sudo", "umount", str(MOUNTPOINT)])
        except subprocess.CalledProcessError:
            print(f"unmount {MOUNTPOINT} failed; retry in 1s")
            time.sleep(1)
        else:
            # unmounted successfully; stop retrying
            break
def test_sphere():
    """Sphere surface mesh: volume and norm invariants."""
    mesh = meshplex.read(this_dir / "meshes" / "sphere.vtk")
    expected_volume = 12.273645818711595
    convol_norms = [1.0177358705967492, 0.10419690304323895]
    ce_ratio_norms = [366.3982135866799, 1.7062353589387327]
    cellvol_norms = [0.72653362732751214, 0.05350373815413411]
    run(mesh, expected_volume, convol_norms, ce_ratio_norms, cellvol_norms)
def build_latex(docsdir, outputdir, version):
    """Build the SymPy PDF docs with `make latexpdf` and copy the result
    into `outputdir` under a versioned file name."""
    run('make', 'clean', cwd=docsdir)
    run('make', 'latexpdf', cwd=docsdir)
    src = join('doc', '_build', 'latex', 'sympy-%s.pdf' % (version, ))
    dst = join(outputdir, 'sympy-docs-pdf-%s.pdf' % (version, ))
    shutil.copyfile(src, dst)
def test_file():
    """Passing arguments should read from files."""
    paths = [
        Path("src/basename.c").resolve(),
        Path("src/cal.c").resolve(),
        Path("src/cat.c").resolve(),
    ]
    concatenated = "".join(p.read_text() for p in paths)
    # single file: cat echoes its contents exactly
    assert run(["cat", str(paths[0])]).stdout == paths[0].read_text()
    # several files: outputs are concatenated in argument order
    assert run(["cat"] + [str(p) for p in paths]).stdout == concatenated
def test_kill_should_kill_just_specific_nodes():
    """Killing nodes 2 and 3 must leave only node 1 running and recorded."""
    run('create')
    assert_mongods_running(3)
    run('killnodes 2 3')
    assert_mongods_running(1)
    nodes = state.load()
    # dict.keys() is a view on Python 3 and never compares equal to a
    # list, so the original `['1'] == nodes.keys()` could not pass there
    assert ['1'] == list(nodes.keys())
def tirs_to_at_satellite_temperature(
        tirs_1x,
        mtl_file,
        brightness_temperature_prefix=None,
        null=False,
        quiet=True):
    """
    Helper function to convert TIRS bands 10 or 11 in to at-satellite
    temperatures.

    Chains extract_number_from_string(), digital_numbers_to_radiance()
    and radiance_to_brightness_temperature() over the named TIRS band
    using the given Landsat8 MTL file, and returns the name of a
    temporary at-satellite temperature map.
    """
    # which band number and MTL file
    band_number = extract_number_from_string(tirs_1x)
    tmp_radiance = '{0}.{1}'.format(tmp_map_name('radiance'), band_number)
    tmp_brightness_temperature = '{0}.{1}'.format(
        tmp_map_name('brightness_temperature'), band_number)
    landsat8 = Landsat8_MTL(mtl_file)

    # rescale DNs to spectral radiance
    radiance_expression = landsat8.toar_radiance(band_number)
    digital_numbers_to_radiance(
        tmp_radiance,
        tirs_1x,
        radiance_expression,
        null,
        quiet,
    )

    # convert spectral radiance to at-satellite temperature
    temperature_expression = landsat8.radiance_to_temperature(band_number)
    radiance_to_brightness_temperature(
        tmp_brightness_temperature,
        tmp_radiance,
        temperature_expression,
        quiet,
    )

    # save Brightness Temperature map?
    if brightness_temperature_prefix:
        bt_output = brightness_temperature_prefix + band_number
        run('g.rename', raster=(tmp_brightness_temperature, bt_output))
        tmp_brightness_temperature = bt_output

    return tmp_brightness_temperature
def test_should_not_allow_create_if_state_file_exists():
    """A second 'create' must be rejected while a state file exists."""
    run('create')
    assert os.path.isfile(state.filename)
    assert_mongods_running(3)
    output = run('create')
    # rejected with a dedicated exit code and an explanatory message
    assert exitcodes.MONGOD_ALREADY_STARTED == output.exitcode
    assert 'Mongods already started' in output.stderr
    assert 'state file found' in output.stderr
def test_sphere():
    """Downloaded sphere mesh: volume and norm invariants."""
    filename = download_mesh("sphere.vtk", "06b163871cc0f23344d71c990dffe577")
    mesh = meshplex.read(filename)
    expected = (
        12.273645818711595,
        [1.0177358705967492, 0.10419690304323895],
        [366.3982135866799, 1.7062353589387327],
        [0.72653362732751214, 0.05350373815413411],
    )
    run(mesh, *expected)
def toggle_klayout(qtile):
    """Change the keyboard layout taking into account the positions
    defined in K_LAYOUTS.

    Fixes: regex patterns are now raw strings (``"\\+"`` in a plain string
    is an invalid escape sequence on Python 3); wraparound uses modulo.
    """
    query = run("setxkbmap -print", with_output=True)
    # grab the token between the '+' separators of the xkb keymap line
    search_layout = re.search(r"\+(.*)\+", query).group(1)
    # "us(colemak)" -> "us colemak" to match the K_LAYOUTS entries
    current_layout = re.sub(r"\)", "", re.sub(r"\(", " ", search_layout))
    next_layout = (K_LAYOUTS.index(current_layout) + 1) % len(K_LAYOUTS)
    command = "setxkbmap {}".format(K_LAYOUTS[next_layout])
    run(command, with_output=False)
def test_sphere():
    """Voropy sphere mesh: volume and norm invariants."""
    filename = download_mesh('sphere.msh', '70a5dbf79c3b259ed993458ff4aa2e93')
    mesh, _, _, _ = voropy.read(filename)
    expected_volume = 12.273645818711595
    run(mesh,
        expected_volume,
        [1.0177358705967492, 0.10419690304323895],
        [366.3982135866799, 1.7062353589387327],
        [0.72653362732751214, 0.05350373815413411])
    # assertEqual(mesh.num_delaunay_violations(), 60)
def test_pacman():
    """Voropy pacman mesh with boundary flat-cell correction."""
    filename = download_mesh('pacman.msh', '2da8ff96537f844a95a83abb48471b6a')
    mesh, _, _, _ = voropy.read(filename, flat_cell_correction='boundary')
    run(mesh,
        73.64573933105898,
        [3.5908322974649631, 0.26638548094154707],
        [354.8184824409405, 0.94690319745399243],
        [2.6213234038171014, 0.13841739494523228])
    violations = mesh.num_delaunay_violations()
    assert violations == 0
def test_tetrahedron():
    """Single-tetrahedron gmsh mesh: volume and norm invariants."""
    filename = download_mesh("tetrahedron.msh", "27a5d7e102e6613a1e58629c252cb293")
    mesh, _, _, _ = meshplex.read(filename)
    expected = [
        64.1500299099584,
        [16.308991595922095, 7.0264329635751395],
        [6.898476155562041, 0.34400453539215237],
        [11.571692332290635, 2.9699087921277054],
    ]
    run(mesh, *expected)
def mount(self) -> None:
    """Mount the storage device at MOUNTPOINT (native/SCONE kinds only),
    opening the LUKS mapping first when a disk key is configured."""
    if self.kind not in [StorageKind.NATIVE, StorageKind.SCONE]:
        return
    MOUNTPOINT.mkdir(exist_ok=True)
    # SCONE handles disk encryption itself, so skip cryptsetup there
    needs_luks = self.hd_key and self.kind != StorageKind.SCONE
    if needs_luks:
        cryptsetup_luks_open(self.raw_dev, self.cryptsetup_name, self.hd_key)
    run(["sudo", "mount", self.dev, str(MOUNTPOINT)])
    run(["sudo", "chown", "-R", getpass.getuser(), str(MOUNTPOINT)])
def test_pacman():
    """Pacman surface mesh: invariants hold and no Delaunay violations."""
    mesh = meshplex.read(this_dir / "meshes" / "pacman.vtk")
    expected = (
        73.64573933105898,
        [3.596101914906618, 0.26638548094154696],
        [379.275476266239, 1.2976923100235962],
        [2.6213234038171014, 0.13841739494523228],
    )
    run(mesh, *expected)
    assert mesh.num_delaunay_violations() == 0
def worker():
    """Queue consumer: downloads requested videos via youtube-dl and
    dispatches the configured callbacks for each stage.

    NOTE(review): reconstructed from collapsed source — confirm the
    nesting of the callback blocks against the original file.
    """
    while True:
        item = q.get()
        # try:
        # probe metadata only; the actual download happens further down
        info = ydl.extract_info(item["video"], download=False)
        if (int(info["duration"] / 60) > DUR_LIMIT
                and item["sender_id"] not in SUDO_USERS):
            # over the duration limit and sender is not privileged
            if "on_duration_limit" in item:
                if item["on_duration_limit"]:
                    # inject the limit value into the callback's message
                    item["on_duration_limit"]["args"][0] = item[
                        "on_duration_limit"]["args"][0].format(DUR_LIMIT)
                    run(item["on_duration_limit"])
            q.task_done()
        elif info["is_live"]:
            # live streams cannot be downloaded
            if "on_is_live_error" in item:
                if item["on_is_live_error"]:
                    run(item["on_is_live_error"])
            q.task_done()
        else:
            file_name = info["id"] + "." + info["ext"]
            _log = item["play_function"]["kwargs"]["log"]
            # download only when not already cached in downloads/
            if file_name not in os.listdir("downloads"):
                if "on_start" in item:
                    if item["on_start"]:
                        run(item["on_start"])
                if _log:
                    # fetch the thumbnail for the log image
                    open("downloads/" + info["id"] + ".png", "wb+").write(
                        requests.get(info["thumbnails"][-1]["url"]).content)
                ydl.download([item["video"]])
            if _log:
                _log["kwargs"]["photo"] = generate_image(
                    "downloads/" + info["id"] + ".png", info["title"],
                    item["sender_name"])
            run(
                item["play_function"],
                file="downloads/" + file_name,
                title=info["title"],
                duration=format_duration(info["duration"]),
                url="https://youtu.be/" + info["id"],
                log=_log,
            )
            if "on_end" in item:
                if item["on_end"]:
                    run(item["on_end"])
            q.task_done()
def test_tetrahedron():
    """Single-tetrahedron VTK mesh: volume and norm invariants."""
    filename = download_mesh("tetrahedron.vtk", "10f3ccd1642b634b22741894fe6e7f1f")
    mesh = meshplex.read(filename)
    expected = (
        64.1500299099584,
        [16.308991595922095, 7.0264329635751395],
        [6.898476155562041, 0.34400453539215237],
        [11.571692332290635, 2.9699087921277054],
    )
    run(mesh, *expected)
def test_pacman():
    """Downloaded pacman mesh: invariants hold, no Delaunay violations."""
    filename = download_mesh("pacman.vtk", "c621cb22f8b87cecd77724c2c0601c36")
    mesh = meshplex.read(filename)
    expected = (
        73.64573933105898,
        [3.596101914906618, 0.26638548094154696],
        [379.275476266239, 1.2976923100235962],
        [2.6213234038171014, 0.13841739494523228],
    )
    run(mesh, *expected)
    assert mesh.num_delaunay_violations() == 0
def set_current(timestamp):
    """
    Set an app directory to the currently live app by creating a symlink
    as specified in config, enable its apache site, and restart apache.
    """
    app_path = path.join(install_parent, timestamp)
    log(
        "Linking live path '{live}' to app dir: {app_dir}".format(
            app_dir=app_path, live=live_link_path
        )
    )
    run(sh.rm, live_link_path, force=True)
    run(sh.ln, app_path, live_link_path, symbolic=True)

    site_to_enable = path.join(sites_available_dir, timestamp)
    existing_links = sh.glob(path.join(sites_enabled_dir, '*'))
    # Delete existing site links
    run(sh.rm, existing_links, f=True)
    # Add our link into sites-enabled
    run(sh.ln, site_to_enable, sites_enabled_path, s=True)
    # Restart apache
    restart()
def container_status(self, container_name, group_name):
    """Return a dict describing `container_name`'s lxc-info state, or
    ``{"status": "error"}`` when the container is unknown."""
    if container_name not in self.list_containers(group_name):
        return {
            "status": "error",
        }
    dir_name = os.path.join(self.lxc_base_dir, group_name)
    command = self.lxc_commands["lxc-info"] + " "
    command += "--name {container_name} ".format(container_name=container_name)
    command += "--lxcpath {dir_name} ".format(dir_name=dir_name)
    result = helpers.run(command, output=True)
    result_dict = {}
    for line in result.splitlines():
        # split on the first colon only, so values that themselves contain
        # ':' (timestamps, IPv6 addresses) are no longer skipped
        split_list = line.split(":", 1)
        if len(split_list) != 2:
            logger.debug("failed to split line '%s'", line)
            continue
        key, value = split_list
        key = key.strip()
        value = value.strip()
        # NOTE(review): many lxc-info versions print 'State:' capitalized —
        # confirm this lowercase comparison matches the deployed lxc output
        if key == "state":
            result_dict["state"] = value
    return result_dict
def find(input):
    """Query helpers.run and format the destination plus per-airport
    price/carrier details into a display string."""
    response = helpers.run(input)
    parts = ['Destination %s \n' % response['destination']]
    for airport in response['prices'].keys():
        details = response['prices'][airport]
        parts.append('\tAirport: %s\n\tPrice: %f\n\tCarrier Name:%s\n'
                     % (airport, details['Price'], details['CarrierName']))
    return ''.join(parts)
def copy_ssl_certificates(timestamp):
    """
    Copy either the default self-signed certificate or the provided
    custom ones into /etc/ssl/certs/wsgi-app.*

    Return the locations of the created files
    """
    certs_dir = '/etc/ssl/certs'
    keyfile_path = path.join(
        certs_dir,
        'wsgi-app.{0}.key'.format(timestamp)
    )
    certificate_path = path.join(
        certs_dir,
        'wsgi-app.{0}.crt'.format(timestamp)
    )
    custom_keyfile = config('ssl_keyfile')
    custom_certificate = config('ssl_certificate')
    create_dir(certs_dir)
    log('Saving certificate files')
    if custom_keyfile and custom_certificate:
        # NOTE(review): b64decode returns bytes on Python 3 while the files
        # are opened in text mode — confirm this module targets Python 2
        keyfile_content = b64decode(custom_keyfile)
        certificate_content = b64decode(custom_certificate)
        with open(keyfile_path, 'w') as keyfile:
            keyfile.write(keyfile_content)
        with open(certificate_path, 'w') as certificate:
            certificate.write(certificate_content)
    else:
        # no custom material supplied: generate a self-signed pair
        config_path = path.join(charm_dir, 'ssl/wsgi-app.conf')
        run(
            sh.openssl.req,
            "-new", "-nodes", "-x509",
            "-newkey", "rsa:2048",
            "-days", "365",
            "-keyout", keyfile_path,
            "-out", certificate_path,
            "-config", config_path
        )
    return (keyfile_path, certificate_path)
def setup_apache_wsgi(timestamp, app_dir):
    """Create the apache vhost for the `timestamp` release: enable the
    needed modules, render the mod_wsgi site config into sites-available,
    and hook the charm's scriptrc into apache's envvars."""
    run(sh.a2enmod, "ssl", "proxy_http")
    available_path = path.join(sites_available_dir, timestamp)
    (keyfile_path, certificate_path) = copy_ssl_certificates(timestamp)
    conf_template = apache_conf_template(app_dir)
    wsgi_path = path.join(live_link_path, config('wsgi_file_path'))
    conf_content = conf_template.render({
        'wsgi_path': wsgi_path,
        'wsgi_app_name': config('wsgi_app_name'),
        'wsgi_dir': path.dirname(wsgi_path),
        'wsgi_file': path.basename(wsgi_path),
        'static_url_path': config('static_url_path'),
        'static_path': path.join(live_link_path, config('static_path')),
        'keyfile_path': keyfile_path,
        'certificate_path': certificate_path
    })
    # Save it to sites-available
    with open(available_path, 'w') as conf:
        conf.write(conf_content)
    # Add line to bottom of envvars to source scriptrc
    apache_env_file = path.join(apache_dir, 'envvars')
    source_file_path = path.join(charm_dir, 'scripts/scriptrc')
    source_command = '. {0}'.format(source_file_path)
    comment = '# scriptrc link added by apache2-wsgi charm:'
    comment_exists = False
    with open(apache_env_file) as env_file_read:
        comment_exists = comment in env_file_read.read()
    # append only once, guarded by the marker comment above
    if not comment_exists:
        with open(apache_env_file, 'a') as env_file:
            env_file.write(comment + '\n')
            env_file.write(source_command + '\n')
def open_command(*repl_args):
    """Run the macOS `open` command built from the `_command` template,
    substituting `repl_args` (or the clipboard) into '%s' placeholders."""
    # shallow copy: nested values like command["args"] are still shared
    command = copy.copy(_command)
    if pre_hook:
        # might want to pass repl_args to the pre_hook
        pre_hook(command)
    to_run = [OPEN]
    if "app" in command:
        to_run.extend(["-a", command["app"]])
    # drop empty/falsy replacement arguments
    repl_args = [arg for arg in repl_args if arg]
    args = command.get("args", [])
    for arg in args:
        if arg.count("%s") == 1:
            if len(repl_args) == 1:
                # NOTE(review): passes the whole list rather than
                # repl_args[0] — confirm helpers.quote accepts a list
                arg = arg % helpers.quote(repl_args)
            else:
                # no single replacement given: fall back to the clipboard
                arg = arg % helpers.quote(mac.pbpaste())
        if arg.startswith("~"):
            arg = helpers.expand_path(arg)
        to_run.append(arg)
    helpers.run(to_run)
def pip_dependencies(app_path):
    """
    Install pip dependencies from requirements file and from the
    dependencies directory
    """
    # Read paths from config
    requirements_path = path.join(app_path, config('pip_requirements_path'))
    dependencies_path = path.join(app_path, config('pip_cache_path'))

    # nothing to do without a requirements file
    if not path.isfile(requirements_path):
        return

    log("Installing pip requirements from {0}".format(requirements_path))
    # Install dependencies in dependencies directory
    run(
        sh.pip.install,
        r=requirements_path,
        find_links=dependencies_path,  # Path to local package files
        no_index=config('pip_no_index')  # Use PyPi?
    )
def upload_to_s3(self, what, compress, recursive, mimetypes = False):
    """Invoke the S3 upload script for `what` with flags assembled from
    the given options and this object's credentials."""
    # Assemble optional flags for the upload script
    args = []
    if getattr(self, 'dry_run', False):
        args.append('--dry-run')
    if recursive:
        args.append('--recursive')
    if compress:
        args.append('--compress-all')
        # NOTE(review): each entry is a single "-c ext" string, not two
        # separate argv tokens — confirm helpers.run splits these
        args.extend('-c %s' % ext for ext in ('png', 'jpg', 'zip'))
    if mimetypes:
        args.append('--mimetypes')
    helpers.run(sys.executable, self.script, what,
        "--key", self.access,
        '--secret', self.secret,
        '--bucket', self.bucket,
        '--public-read', '--time', '--verbose',
        *args
    )
def do(self):
    """Update the SVN working copy, record its last-changed revision, and
    optionally export that revision to `self.dest`."""
    # TODO: make sure repo is clean
    helpers.run(SVN, 'status', self.source)
    helpers.run(SVN, 'update', self.source)
    svnoutput = helpers.run(SVN, 'info', self.source, verbose = False)
    # parse the "Key: value" lines of `svn info` into a dict
    svninfo = dict(line.split(': ', 1)
                   for line in filter(bool, svnoutput.splitlines()))
    self.revision = int(svninfo['Last Changed Rev'])
    if self.options.get('dest'):
        # export a pristine copy pinned to the recorded revision
        helpers.run(SVN, 'export', '-r', self.revision, self.source, self.dest)
def delete_container(self, container_name, group_name):
    """Destroy `container_name` in `group_name`; returns False when the
    container does not exist, otherwise the command result."""
    if container_name not in self.list_containers(group_name):
        return False
    dir_name = os.path.join(self.lxc_base_dir, group_name)
    command = "{0} --name {1} --lxcpath {2} ".format(
        self.lxc_commands["lxc-destroy"], container_name, dir_name)
    result = helpers.run(command)
    logger.debug("deleting '%s' for group '%s', result from command '%s'",
                 container_name, group_name, result)
    return result
def list_containers(self, group_name):
    """Return the names of all containers in `group_name`, one per line
    of ``lxc-ls`` output."""
    dir_name = os.path.join(self.lxc_base_dir, group_name)
    command = self.lxc_commands["lxc-ls"] + " "
    command += "--lxcpath {dir_name} ".format(dir_name=dir_name)
    output = helpers.run(command, output=True)
    logger.debug("listing containers for group '%s', output from command '%s'",
                 group_name, output)
    # each line of lxc-ls output is one container name; splitlines()
    # replaces the redundant append loop
    container_names = output.splitlines()
    logger.debug("listing containers for group '%s', containers '%s'",
                 group_name, container_names)
    return container_names
def create_container(self, container_name, group_name, template_name="ubuntu"):
    """Create `container_name` from `template_name` in `group_name`;
    returns False when it already exists, otherwise the command result."""
    if container_name in self.list_containers(group_name):
        return False
    dir_name = os.path.join(self.lxc_base_dir, group_name)
    command = "{0} --name {1} --template {2} --lxcpath {3} ".format(
        self.lxc_commands["lxc-create"], container_name, template_name,
        dir_name)
    result = helpers.run(command)
    logger.debug("created container '%s' for group '%s', result from command '%s'",
                 container_name, group_name, result)
    return result
def app_command(*args):
    """Open the configured application via `open -a`, forwarding any
    non-empty extra arguments."""
    invocation = ["open", "-a", " ".join(app_name)]
    invocation.extend(args)
    # drop empty/falsy arguments before running
    helpers.run([arg for arg in invocation if arg])
import os

import tasks
import helpers

# Config ---------------------------------------------------------------------

# Set up command line option handling, logger creation, and load config file
options = helpers.get_options()
logger_proxy, logging_mutex = helpers.make_logger(options, __file__)
# NOTE(review): `yaml` is not imported in this chunk — presumably imported
# elsewhere in the file; yaml.load without an explicit Loader is unsafe on
# untrusted config (prefer yaml.safe_load)
config = yaml.load(open(options.config).read())


def report(result):
    """Wrapper around Result.report"""
    result.report(logger_proxy, logging_mutex)

# Pipeline -------------------------------------------------------------------

# NOTE(review): @files/@transform/suffix come from the ruffus pipeline
# library, imported outside this chunk; `map` shadows the builtin
@files(list(tasks.fastq_to_other_files(config, extension='.bowtie.sam')))
def map(infile, outfile):
    # align reads with bowtie and log the outcome
    result = tasks.bowtie(infile, outfile, config)
    report(result)


@transform(map, suffix('.bowtie.sam'), '.bowtie.sam.count')
def count(infile, outfile):
    # count alignments from the upstream mapping step
    result = tasks.count(infile, outfile, config)
    report(result)

# ----------------------------------------------------------------------------
helpers.run(options)
from helpers import run
import sys


def invert():
    """Build a transform that inverts 8-bit pixel values."""
    def transform(img):
        return 255 - img
    return transform


# no CLI-typed parameters for this filter
invert.types = ()

if __name__ == "__main__":
    run(invert, sys.argv)
def maestro(scriptId):
    """Run a Keyboard Maestro script by ID (more robust) or name."""
    osascript_command = (
        """osascript -e 'tell application "Keyboard Maestro Engine" to """
        """do script "%s"'\n""" % scriptId
    )
    run(osascript_command)
def do(self):
    """Build the NSIS script with verbose logging, from its own directory
    so relative includes resolve."""
    script_dir = self.script.parent
    with helpers.cd(script_dir):
        helpers.run(self.path_nsis, '/V4', self.script)
import cv2
from helpers import run
import sys


def resize(scale):
    """Build a transform scaling an image by `scale` with
    nearest-neighbour interpolation."""
    def transform(img):
        return cv2.resize(img, (0, 0), fx=scale, fy=scale,
                          interpolation=cv2.INTER_NEAREST)
    return transform


# one float CLI parameter: the scale factor
resize.types = (float,)

if __name__ == "__main__":
    run(resize, sys.argv)
def do(self):
    """Sync the git working copy to remote/branch (hard-resetting local
    changes), update submodules, and record the short HEAD revision."""
    with helpers.cd(self.source):
        branch = getattr(self, 'branch', 'master')
        remote = getattr(self, 'remote', 'origin')
        # non-empty porcelain status means the working tree is dirty
        git_status = helpers.run('git', 'status', '--porcelain',
                                 verbose = False).strip()
        helpers.run('git', 'checkout', branch)
        if git_status:
            # discard local modifications before syncing
            helpers.run('git', 'reset', '--hard', branch)
        helpers.run('git', 'fetch', remote, branch)
        helpers.run('git', 'merge', '--ff-only', '%s/%s' % (remote, branch))
        helpers.run('git', 'submodule', 'init')
        helpers.run('git', 'submodule', 'update')
        self.revision = helpers.run('git', 'rev-parse', '--short', 'HEAD',
                                    verbose = False).strip()
        if self.options.get('dest'):
            # deliberately unreachable sketch below: the raise documents
            # that the archive/unzip flow is not implemented yet
            raise Exception("Haven't figured out the proper set of `git archive` commands yet")
            helpers.run('git', 'archive', '-o',
                        self.options.get('dest') / ('%s.zip' % self.revision),
                        branch)
            with helpers.cd(self.options.get('dest')):
                # TODO: this doesn't work.
                helpers.run('unzip', 'archive.zip')
dest_keys[0].get_contents_as_string()) self.assertEquals("Even better version", src_keys[0].get_contents_as_string()) def test_copy_key_both_versioned(self): source = self.s3.create_bucket("source") source.configure_versioning(True) destination = self.s3.create_bucket("destination") destination.configure_versioning(True) key = Key(source) key.name = "an/object" key.set_contents_from_string("This is an object") key.set_contents_from_string("This is a better version of an object") key.set_contents_from_string("Even better version") versions = source.get_all_versions(prefix="an/object") for version in versions[::-1]: destination.copy_key("another/object", "source", "an/object", src_version_id=version.version_id) dest_keys = destination.get_all_keys() src_keys = source.get_all_keys() self.assertEquals(len(src_keys), len(dest_keys)) dst_versions = destination.get_all_versions(prefix="another/object") self.assertEquals(len(versions), len(dst_versions)) for s_version, d_version in izip(versions, dst_versions): self.assertEquals(s_version.size, d_version.size) self.assertEquals(s_version.etag, d_version.etag) if __name__ == "__main__": helpers.run()
def test_should_list_all_running_nodes():
    """'listnodes' output should mention each of the three nodes."""
    run('create')
    output = run('listnodes')
    assert_mongods_running(3)
    # one "Node" entry per running mongod
    assert 3 == output.stdout.count('Node')
def test_should_not_fail_if_no_state_file():
    """'listnodes' must succeed (not crash) when no state file exists."""
    assert_mongods_running(0)
    assert_no_state_file()
    output = run('listnodes')
    # The captured output was previously unused, so the test asserted
    # nothing about the command outcome; pin the success exit code.
    assert 0 == output.exitcode
import cv2
from helpers import run
import sys


def edges(low, high):
    """Build a transform applying Canny edge detection with the given
    hysteresis thresholds."""
    def transform(img):
        return cv2.Canny(img, low, high)
    return transform


# two int CLI parameters: the low and high thresholds
edges.types = (int, int)

if __name__ == "__main__":
    run(edges, sys.argv)
def do(self):
    """Run makensis on the installer script from its own directory.

    NOTE: Python 2 module (print statement) — left byte-identical.
    """
    with helpers.cd(self.installer_script.parent):
        print '*** running makensis with options %r ***' % (self.options,)
        helpers.run(self.nsis_exe, '/V4', self.installer_script)
def do(self):
    """Run the Digsby unit-test suite via its runtests.py entry point."""
    ## not valid in 2.6
    #helpers.run(sys.executable, '-m', 'unittest', 'discover', '-s', self.dest / 'digsby' / 'src' / 'tests')
    runner = self.dest / 'digsby' / 'src' / 'tests' / 'unittests' / 'runtests.py'
    helpers.run(sys.executable, runner)