def compile_native_invocation_handler(*possible_homes):
    '''Find javac and compile NativeInvocationHandler.java.'''
    javac = find_javac(possible_homes)
    subprocess.check_call([
        javac, '-target', '1.6', '-source', '1.6',
        join('jnius', 'src', 'org', 'jnius', 'NativeInvocationHandler.java')
    ])

def save_results(vcs, signature, result_path, patterns):
    """Save results matching `patterns` at `result_path`.

    Args:
        vcs (easyci.vcs.base.Vcs) - the VCS object for the actual project
            (not the disposable copy)
        signature (str) - the project state signature
        result_path (str) - the path containing the result, usually a
            disposable copy of the project
        patterns (list of str) - `rsync`-compatible patterns matching test
            results to save.
    """
    results_directory = _get_results_directory(vcs, signature)
    if not os.path.exists(results_directory):
        os.makedirs(results_directory)
    with open(os.path.join(results_directory, 'patterns'), 'w') as f:
        f.write('\n'.join(patterns))
    if not os.path.exists(os.path.join(results_directory, 'results')):
        os.mkdir(os.path.join(results_directory, 'results'))
    includes = ['--include={}'.format(x) for x in patterns]
    cmd = ['rsync', '-r'] + includes + [
        '--exclude=*',
        os.path.join(result_path, ''),
        os.path.join(results_directory, 'results', ''),
    ]
    subprocess.check_call(cmd)

def fix_bands(self, input_path, workspace, cfg, src_meta):
    """Drop the extra band of a two-band paletted source using gdal_translate.

    Returns the path to a single-band VRT, or `input_path` unchanged when no
    fix is needed.
    """
    output_path = os.path.join(workspace, 'fix_bands.vrt')
    if 2 == src_meta.get('nb_bands', 0) and src_meta.get('ispaletted', False):
        band = 1
        call = [
            'gdal_translate',
            '-b', '{}'.format(band),
            '-of', 'VRT',
            input_path, output_path
        ]
        logger.debug(' '.join(call))
        try:
            subprocess.check_call(call)
        except subprocess.CalledProcessError as e:
            logger.error('Could not fix bands.')
            logger.debug(traceback.format_exc())
            raise
        # Update metadata
        src_meta['nb_bands'] = 1
        if 0 < len(src_meta['nodatavalues']):
            src_meta['nodatavalues'] = [src_meta['nodatavalues'][0]]
        return output_path
    # By default return input_path
    return input_path

def guarded_join(*sub_paths, **kwargs):
    """
    Uses os.path.join to get a path from the given args, checks that the path
    is available by using the check_call method of the subprocess backport
    from Python 3.x (subprocess32) with the given timeout, and returns the path.

    parameters:
        [sub_paths] (strings) - arguments for os.path.join
        timeout (int) - Timeout for the availability check in seconds (default 1)
    returns:
        path (string)
    possible Exceptions:
        IOError
        FileSystemNotAvailable
    """
    timeout = kwargs.get('timeout', DEFAULT_TIMEOUT)
    full_path = os.path.join(*sub_paths)

    # Move this check here to allow check in runtime
    if getattr(settings, 'GUARDED_JOIN_TEST', False):
        raise FileSystemNotAvailable(
            'This is a test exception. Disable it in settings.')

    try:
        check_call(['test', '-e', full_path], timeout=timeout)
    except CalledProcessError:
        raise IOError('No such file or directory: %s' % full_path)
    except TimeoutExpired:
        raise FileSystemNotAvailable(
            'Cannot access %s. Tried for %s seconds' % (full_path, timeout))
    return full_path

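# A minimal usage sketch for guarded_join above; read_report, settings.MEDIA_ROOT
# and the sub-path names are assumptions for illustration, not part of the
# original code. The point is that the caller either gets back a verified path
# or sees IOError / FileSystemNotAvailable before touching a dead network mount.
def read_report(settings):
    full_path = guarded_join(settings.MEDIA_ROOT, 'reports', 'latest.pdf', timeout=2)
    with open(full_path, 'rb') as fh:
        return fh.read()
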
def compute(version_number, max_events, skip_events, event_file, log_dir,
            tmp_dir, aod_dir, reco, job_id):
    '''
    Runs reco.py with the given parameters.
    '''
    import subprocess32, shutil, socket, os, sys

    log_dir = os.path.join(log_dir, 'job{:0>4}'.format(job_id))
    tmp_dir = os.path.join(tmp_dir, 'job{:0>4}'.format(job_id))
    try:
        os.makedirs(log_dir)
        os.makedirs(tmp_dir)
    except OSError as e:
        print(e)

    athena_log = os.path.join(log_dir, 'athena.log')
    arg = 'nice {} {} -n {} -s {} --log_file {} --tmp_dir {} --output_dir {} {} {}'.format(
        sys.executable, reco, max_events, skip_events, athena_log, tmp_dir,
        aod_dir, event_file, version_number)

    with open(os.path.join(log_dir, 'reco.log'), 'w+') as fh:
        subprocess32.check_call(arg,
                                executable='/bin/bash',
                                shell=True,
                                stdout=fh,
                                stderr=subprocess32.STDOUT,
                                cwd=os.getcwd(),
                                env=os.environ.copy())

    try:
        shutil.rmtree(log_dir)
        shutil.rmtree(tmp_dir)
    except OSError as e:
        print(e)

    return socket.gethostname()

def build(self):
    if "Makefile" in os.listdir(os.getcwd()):
        subprocess.check_call(["make", "clean"])
    subprocess.check_call(["autoreconf", "--install"])
    subprocess.check_call(["automake"])
    subprocess.check_call(["./configure", "CFLAGS=-g"])
    subprocess.check_call(["make"], stderr=subprocess.STDOUT)

def guarded_join(*sub_paths, **kwargs):
    """
    Uses os.path.join to get a path from the given args, checks that the path
    is available by using the check_call method of the subprocess backport
    from Python 3.x (subprocess32) with the given timeout, and returns the path.

    parameters:
        [sub_paths] (strings) - arguments for os.path.join
        timeout (int) - Timeout for the availability check in seconds (default 1)
    returns:
        path (string)
    possible Exceptions:
        IOError
        FileSystemNotAvailable
    """
    timeout = kwargs.get("timeout", DEFAULT_TIMEOUT)
    full_path = os.path.join(*sub_paths)

    # Move this check here to allow check in runtime
    if getattr(settings, "GUARDED_JOIN_TEST", False):
        raise FileSystemNotAvailable("This is a test exception. Disable it in settings.")

    try:
        check_call(["test", "-e", full_path], timeout=timeout)
    except CalledProcessError:
        raise IOError("No such file or directory: %s" % full_path)
    except TimeoutExpired:
        raise FileSystemNotAvailable(
            "Cannot access %s. Tried for %s seconds" % (full_path, timeout))
    return full_path

def compile_native_invocation_handler(*possible_homes):
    '''Find javac and compile NativeInvocationHandler.java.'''
    javac = find_javac(possible_homes)
    source_level = '1.6'
    # We have to check what version of javac this is, because -target 1.6 is
    # no longer supported on JDKs >= 12.
    javac_version = subprocess.check_output(
        [javac, '-version'], stderr=subprocess.STDOUT)
    for m in re.finditer(r'\d+', javac_version.decode('ascii')):
        if int(m.group(0)) >= 12:
            source_level = '1.7'
            break
    try:
        subprocess.check_call([
            javac, '-target', source_level, '-source', source_level,
            join('jnius', 'src', 'org', 'jnius', 'NativeInvocationHandler.java')
        ])
    except FileNotFoundError:
        subprocess.check_call([
            javac.replace('"', ''), '-target', source_level, '-source', source_level,
            join('jnius', 'src', 'org', 'jnius', 'NativeInvocationHandler.java')
        ])

def temp_copy(self):
    """Yields a new Vcs object that represents a temporary, disposable
    copy of the current repository. The copy is deleted at the end
    of the context.

    The following are not copied:
    - ignored files
    - easyci private directory (.git/eci for git)

    Yields:
        Vcs
    """
    with contextmanagers.temp_dir() as temp_dir:
        temp_root_path = os.path.join(temp_dir, "root")
        path = os.path.join(self.path, "")  # adds trailing slash
        check_call(
            [
                "rsync",
                "-r",
                "--exclude={}".format(self.private_dir()),
                "--filter=dir-merge,- {}".format(self.ignore_patterns_file()),
                path,
                temp_root_path,
            ]
        )
        copy = self.__class__(path=temp_root_path)
        yield copy

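# A minimal usage sketch for temp_copy above; run_in_disposable_copy and
# run_tests are assumed names, not from the original project. Work happens
# against the rsync'ed disposable copy, so the real working tree is never
# modified, and the copy is removed automatically when the `with` block exits.
def run_in_disposable_copy(vcs, run_tests):
    with vcs.temp_copy() as copy:
        return run_tests(copy.path)
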
def test_exit_no_option():
    # It's valid to run 'makeotfexe' without using any options,
    # but if a default-named font file ('font.ps') is NOT found
    # in the current directory, the tool exits with an error
    with pytest.raises(subprocess.CalledProcessError) as err:
        subprocess.check_call([TOOL])
    assert err.value.returncode == 1

def bam_to_sorted_bam(input_file_name, sorted_file_name):
    sort_command = [
        'samtools', 'sort',
        '-T', sorted_file_name,
        '-o', sorted_file_name,
        input_file_name
    ]
    subprocess.check_call(sort_command)
    index_bam(sorted_file_name)

def run(self, cmd, ignore_failure=False):
    fail_ch, success_ch = self.channel_ids
    if ignore_failure:
        cmd = '( %s ) ; tmux wait -S %s' % (cmd, success_ch)
    else:
        cmd = '( %s ) && tmux wait -S %s || tmux wait -S %s' % (cmd, success_ch, fail_ch)
    check_call(['tmux', 'send-keys', '-t', self.tmux_id, cmd, 'C-m'])

def _download_file(command_template, remote_loc, local_path):
    with atomic_output_file(local_path, make_parents=True) as temp_target:
        popenargs = shell_expand_to_popen(
            command_template,
            dict_merge(os.environ, {"REMOTE": remote_loc, "LOCAL": temp_target}))
        log.info("downloading: %s", " ".join(popenargs))
        # TODO: Find a way to support force here.
        subprocess.check_call(popenargs, stdout=SHELL_OUTPUT, stderr=SHELL_OUTPUT,
                              stdin=DEV_NULL)

def build_bowtie2_index(index_prefix, sequence_file_names):
    bowtie2_build_command = [
        'bowtie2-build',
        ','.join(sequence_file_names),
        index_prefix,
    ]
    subprocess.check_call(bowtie2_build_command)

def test_run_tests_pytest_unauthorised(test_pack, tmpdir, portal_mock):
    with open('token', 'w') as f:
        f.write("this is my token")
    env = os.environ.copy()
    env['PYTHONPATH'] = os.path.dirname(os.path.abspath(__file__))
    subprocess.check_call([
        "git", "remote", "set-url", "origin",
        "http://%s:%i/unauthorised.git" % portal_mock.address
    ])
    try:
        subprocess.check_output([
            python, '-m', 'pytest', '-vv', '-p', 'stbt_rig', '-p', 'no:python',
            '--portal-url=%s' % portal_mock.url,
            '--portal-auth-file=token',
            '--node-id=mynode',
            'tests/test.py::test_my_tests'
        ], env=env, stderr=subprocess.STDOUT)
        assert False, "pytest should have failed with auth error"
    except subprocess.CalledProcessError as e:
        print(e.output)
        assert ((b"could not read Username for" in e.output and
                 b'terminal prompts disabled' in e.output) or
                b"Authentication failed for" in e.output)
    finally:
        subprocess.check_call(["git", "remote", "set-url", "origin", "."])

def _install_deisctl():
    script = requests.get(settings.DEISCTL_INSTALL_URL).text
    subprocess.check_call([
        'bash', '-c', script, 'install.sh', '1.12.3',
        private_dir.private_dir_path(settings.APP_NAME)
    ])
    os.chmod(path_utils.executable_path('deisctl'), stat.S_IRWXU)

def _mount_local(self, file_name_no_extension):
    """
    Mount an image file to a class-defined folder.

    Aborts if the mount command fails.

    Args:
        file_name_no_extension (str):
            The file name of the image that will be flashed on the device

    Returns:
        None
    """
    logger.info(
        "Mounting the root partition for ssh-key and USB-networking " +
        "service injection.")
    try:
        common.make_directory(self._LOCAL_MOUNT_DIR)

        root_file_system_file = file_name_no_extension + "." + \
            self._root_extension

        subprocess32.check_call(
            ["mount", root_file_system_file, self._LOCAL_MOUNT_DIR])
    except subprocess32.CalledProcessError as err:
        logger.info("Failed to mount.")
        common.log_subprocess32_error_and_abort(err)

def __init__(self):
    super(Pane, self).__init__()
    # One tmux channel for success, one for failures. See tmux(1).
    self.channel_ids = tuple(uuid.uuid4() for _ in range(2))
    # A queue between the daemon threads that service the channels and the client code.
    # The queue items are the channel index: 0 for failure, 1 for success.
    self.queue = Queue(maxsize=1)
    # The pane index.
    self.index = len(self.panes)
    window = '%s:0' % self.session
    if self.index == 0:
        self.log("Run 'tmux attach -t %s' to monitor output" % self.session)
        check_call([
            'tmux', 'new-session', '-d', '-s', self.session, '-x', '100', '-y', '80'
        ])
        self.tmux_id = check_output(
            ['tmux', 'list-panes', '-t', window, '-F', '#{pane_id}']).strip()
    else:
        self.tmux_id = check_output([
            'tmux', 'split-window', '-v', '-t', window, '-PF', '#{pane_id}'
        ]).strip()
        check_call(['tmux', 'select-layout', '-t', window, 'even-vertical'])
    self.panes.append(self)
    self.threads = tuple(self._start_thread(i) for i in range(2))

def generate_images(workspace_dir, landcover_raster_path, farm_vector_path):
    """ generates the images and reads in the bytes of each image """
    result = {}
    year0_farm_on_raster_path = os.path.join(workspace_dir, 'landcover_and_farm.png')
    burn_vector_script_path = os.path.join(app_docker_dir_path,
                                           'burn-vector-to-raster-png.sh')
    subprocess.check_call([
        burn_vector_script_path, landcover_raster_path,
        year0_farm_on_raster_path, farm_vector_path,
        farm_layer_and_file_name,
        str(farm_lucode)
    ], stdout=subprocess.DEVNULL)
    with open(year0_farm_on_raster_path, 'rb') as f1:
        result['base'] = base64.b64encode(f1.read()).decode('utf-8')
    reveg_vector_path = os.path.join(workspace_dir, 'year1', reproj_reveg_filename)
    is_only_year0_run = not os.path.isfile(reveg_vector_path)
    if is_only_year0_run:
        return result
    reveg_and_farm_on_raster_path = os.path.join(
        workspace_dir, 'landcover_and_farm_and_reveg.png')
    subprocess.check_call([
        burn_vector_script_path,
        year0_farm_on_raster_path.replace('.png', '.tif'),
        reveg_and_farm_on_raster_path, reveg_vector_path,
        KNOWN_LAYER_NAME,
        str(reveg_lucode)
    ], stdout=subprocess.DEVNULL)
    with open(reveg_and_farm_on_raster_path, 'rb') as f2:
        result['reveg'] = base64.b64encode(f2.read()).decode('utf-8')
    return result

def apply_migration_step(
        url, username, password, migration, step, instance, phase, engine,
        migrations_dir, show):
    """Apply migration step."""
    print("-- Applying migration step: id={step[id]}, position={step[position]}".format(step=step))
    report = ''
    exc = None
    try:
        if step['type'] == engine.dialect.name:
            if show:
                print(step['code'])
            else:
                report = 'Executed SQL with rowcount: {0}'.format(
                    engine.execute(step['code']).rowcount)
        else:
            path = os.path.join(migrations_dir, step['path'])
            args = [path]
            if step['type'] == 'python':
                args.insert(0, sys.executable)
            if show:
                print('-- Script to be executed: {0}'.format(' '.join(args)))
            else:
                with CaptureOutput() as capturer:
                    subprocess.check_call(args)
                report = capturer.get_text()
        status = 'apl'
    except Exception as exc:
        report = traceback.format_exc()
        status = 'err'
    try:
        if not show:
            print('Applied migration step with status: {0}'.format(status))
            print('Reporting status to deployment tool')
            data = {
                "report": {
                    "migration": {
                        "uid": migration['uid']
                    },
                    "instance": {
                        "name": instance,
                    }
                },
                "step": {
                    "id": step['id']
                },
                "status": status,
                "log": report
            }
            response = requests.post(
                '{url}/api/migration-step-reports/'.format(url=url),
                auth=(username, password),
                data=json.dumps(data, sort_keys=True),
                headers={'content-type': 'application/json'}
            )
            try:
                response.raise_for_status()
            except Exception:
                pprint.pprint(response.json())
                raise
    finally:
        if exc:
            six.reraise(exc, None, sys.exc_traceback)

def run_ri_layer(layer):
    layer_inputs_fp = os.path.join(input_dir, 'RI_{}'.format(layer))
    _input = 'gultobin -S 1 < {} | fmcalc -p {} -a {} | tee ils.bin |'.format(
        guls_fp, input_dir, oed.ALLOCATE_TO_ITEMS_BY_PREVIOUS_LEVEL_ALLOC_ID
    ) if layer == 1 else ''
    pipe_in_previous_layer = '< ri{}.bin'.format(layer - 1) if layer > 1 else ''
    ri_layer_fp = os.path.join(output_dir, 'ri{}.csv'.format(layer))
    cmd = '{} fmcalc -p {} -n -a {} {}| tee ri{}.bin | fmtocsv > {}'.format(
        _input,
        layer_inputs_fp,
        oed.ALLOCATE_TO_ITEMS_BY_PREVIOUS_LEVEL_ALLOC_ID,
        pipe_in_previous_layer,
        layer,
        ri_layer_fp
    )
    print("\nGenerating deterministic RI layer {} losses with command: {}\n".format(layer, cmd))
    try:
        check_call(cmd, shell=True)
    except CalledProcessError as e:
        raise OasisException(e)
    rils = pd.read_csv(ri_layer_fp)
    rils.drop(rils[rils['sidx'] != 1].index, inplace=True)
    rils.drop('sidx', axis=1, inplace=True)
    rils.reset_index(drop=True, inplace=True)
    rils = rils[(rils[['loss']] != 0).any(axis=1)]
    return rils

def init_implementations():
    sys.stdout.write("Initializing kcc -- ")
    sys.stdout.flush()
    os.chdir(location)
    subprocess.check_call(["kcc", "-c", "implementations.c", "-o", "implementations.o"])
    sys.stdout.write("ok\n")
    sys.stdout.flush()

def two_seven_compatible(filePath):
    """Determines if a python file is 2.7 compatible by seeing if it compiles
    in a subprocess"""
    try:
        check_call(['python2', '-m', 'py_compile', filePath], stderr=DEVNULL)
    except CalledProcessError:
        raise RuntimeError('Python files must be 2.7 compatible')
    return True

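# A minimal usage sketch for two_seven_compatible above; check_sources and the
# file list are assumptions for illustration. The helper raises RuntimeError on
# the first file py_compile rejects under python2, so a plain loop is enough to
# vet a set of sources before packaging.
def check_sources(file_paths):
    for file_path in file_paths:
        two_seven_compatible(file_path)
    return True
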
def testFlagArguments(self):
    subprocess.check_call([
        "gaffer", "python",
        os.path.dirname(__file__) + "/pythonScripts/flagArguments.py",
        "-arguments", "-flag1", "-flag2"
    ])

def main(outfile):
    t = jinja2.Template(r'''
digraph git_concepts {
  graph[label="Git Concepts {{now}}" fontsize=24 fontname="times-bold" labelloc=t]
  node[fontname=Courier fontsize=12 shape=box]
  edge[fontname=Courier fontsize=8 arrowhead=empty fontcolor=red]
  {% for e in edges %}
  "{{e[0]}}" -> "{{e[2]}}"[label="{{e[1]}}"]
  {% endfor %}}
'''.lstrip('\n').decode('UTF-8'))
    out = t.render(
        now=datetime.datetime.now().strftime('%Y-%m-%dT%H:%M'),
        edges=_get_edges(),
    ).encode('UTF-8')
    print out
    if outfile is not None:
        with open(outfile, 'wb') as fh:
            fh.write(out)
        cmd = ['dot', '-Tsvg', '-O', outfile]
        subprocess32.check_call(cmd)

def _get_is_valid_retire_version(self):
    cmd = shlex.split(self.RETIRE_CMD_VERSION)

    retire_version_fd = tempfile.NamedTemporaryFile(prefix='retirejs-version-',
                                                    suffix='.out',
                                                    delete=False,
                                                    mode='w')
    try:
        subprocess.check_call(cmd, stderr=subprocess.DEVNULL, stdout=retire_version_fd)
    except subprocess.CalledProcessError:
        msg = 'Unexpected retire.js exit code. Disabling grep.retirejs plugin.'
        om.out.error(msg)
        return False

    retire_version_fd.close()
    current_retire_version = open(retire_version_fd.name).read()
    self._remove_file(retire_version_fd.name)

    if current_retire_version.startswith(self.RETIRE_VERSION):
        om.out.debug('Using a supported retirejs version')
        return True

    om.out.error('Please install a supported retirejs version (2.x)')
    return False

def vidindex(fn, typ):
    vidindex_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "vidindex")
    vidindex = os.path.join(vidindex_dir, "vidindex")
    subprocess.check_call(["make"], cwd=vidindex_dir, stdout=open("/dev/null", "w"))

    with tempfile.NamedTemporaryFile() as prefix_f, \
         tempfile.NamedTemporaryFile() as index_f:
        try:
            subprocess.check_call(
                [vidindex, typ, fn, prefix_f.name, index_f.name])
        except subprocess.CalledProcessError as e:
            raise DataUnreadableError("vidindex failed on file %s" % fn)
        with open(index_f.name, "rb") as f:
            index = f.read()
        with open(prefix_f.name, "rb") as f:
            prefix = f.read()

    index = np.frombuffer(index, np.uint32).reshape(-1, 2)

    assert index[-1, 0] == 0xFFFFFFFF
    assert index[-1, 1] == os.path.getsize(fn)

    return index, prefix

def up_node1(request):
    def fin():
        subproc.call(['sudo', 'vagrant', 'destroy', '-f', 'node1'])
    request.addfinalizer(fin)
    subproc.check_call(['sudo', 'vagrant', 'destroy', '-f', 'node1'])
    subproc.check_call(['sudo', 'vagrant', 'up', 'node1'])

def _got_response(self, queue):
    server_list, unsatisfiable_jobs = self._calculator.servers_for_queue(queue)
    # Cancel any job/container with unsatisfiable requirements, emitting
    # a log explaining why.
    for job_uuid, reason in unsatisfiable_jobs.iteritems():
        try:
            self._client.logs().create(body={
                'object_uuid': job_uuid,
                'event_type': 'stderr',
                'properties': {'text': reason},
            }).execute()
            # Cancel the job depending on its type
            if arvados.util.container_uuid_pattern.match(job_uuid):
                subprocess.check_call(['scancel', '--name='+job_uuid])
            elif arvados.util.job_uuid_pattern.match(job_uuid):
                self._client.jobs().cancel(uuid=job_uuid).execute()
            else:
                raise Exception('Unknown job type')
            self._logger.debug("Cancelled unsatisfiable job '%s'", job_uuid)
        except Exception as error:
            self._logger.error("Trying to cancel job '%s': %s", job_uuid, error)
    self._logger.debug("Calculated wishlist: %s",
                       ', '.join(s.id for s in server_list) or "(empty)")
    return super(JobQueueMonitorActor, self)._got_response(server_list)

def _csv_to_bin(csv_directory, bin_directory, il=False):
    """
    Create a set of binary files.
    """
    if not os.path.exists(bin_directory):
        os.mkdir(bin_directory)

    if il:
        input_files = viewvalues(INPUT_FILES)
    else:
        input_files = (f for f in viewvalues(INPUT_FILES) if f['type'] != 'il')

    for input_file in input_files:
        conversion_tool = input_file['conversion_tool']
        input_file_path = os.path.join(csv_directory, '{}.csv'.format(input_file['name']))
        if not os.path.exists(input_file_path):
            continue

        output_file_path = os.path.join(bin_directory, '{}.bin'.format(input_file['name']))
        cmd_str = "{} < {} > {}".format(conversion_tool, input_file_path, output_file_path)
        try:
            subprocess.check_call(cmd_str, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            raise OasisException(e)

def reco(evnt_file, version, output_dir, num_events, skip_events, log_file, tmp_dir):
    log_file_handle = open(log_file, 'w+')
    asetup = '. /phys/users/gwatts/bin/CommonScripts/configASetup.sh && . $AtlasSetup/scripts/asetup.sh'

    input_file = evnt_file
    for tag in constants.tags:
        output_file = string.split(input_file, sep='/')[-1] + '_' + tag.tag
        command = tag.command.format(
            input_file, output_file) + ' --maxEvents {} --skipEvents {}'.format(
                num_events, skip_events)
        arg = '{} {} && {}'.format(asetup, tag.release, command)
        print('{} arg: {}'.format(tag.tag, arg))
        sys.stdout.flush()
        try:
            subprocess32.check_call(arg,
                                    executable='/bin/bash',
                                    cwd=tmp_dir,
                                    shell=True,
                                    stdout=log_file_handle,
                                    stderr=subprocess32.STDOUT,
                                    env=os.environ.copy())
        except subprocess32.CalledProcessError as e:
            print('reco.py: {}'.format(e))
            sys.stdout.flush()
            break
        input_file = output_file

    # move the aod file to the output directory, and make it immutable so that
    # it is not accidentally deleted.
    os.rename(os.path.join(tmp_dir, output_file),
              os.path.join(output_dir, output_file))
    output_file_path = os.path.join(output_dir, output_file)
    st = os.stat(output_file_path)
    not_writable = ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
    os.chmod(output_file_path, st.st_mode & not_writable)

def _mount_local(self, file_name_no_extension):
    """
    Mount an image file to a class-defined folder.

    Aborts if the mount command fails.

    Args:
        file_name_no_extension (str):
            The file name of the image that will be flashed on the device

    Returns:
        None
    """
    logger.info(
        "Mounting the root partition for ssh-key and USB-networking " +
        "service injection.")
    try:
        common.make_directory(self._LOCAL_MOUNT_DIR)

        root_file_system_file = file_name_no_extension + "." + \
            self._root_extension

        # guestmount allows us to mount the image without root privileges
        subprocess32.check_call(
            ["guestmount", "-a", root_file_system_file, "-m", "/dev/sda",
             self._LOCAL_MOUNT_DIR])
    except subprocess32.CalledProcessError as err:
        logger.info("Failed to mount.")
        common.log_subprocess32_error_and_abort(err)

def open_interface(self):
    """
    Open the host's network interface for testing

    Returns:
        None
    """
    interface = self._get_usb_nic()
    ip_subnet = self._host_ip + "/30"
    logger.info("Opening the host network interface for testing.")

    # The ifconfig command requires root privileges to run, and in general
    # we would like to run AFT without root privileges. However, we can add
    # a shell script to the sudoers file, which allows us to invoke it with
    # sudo, without the whole program requiring sudo. Hence, the below commands
    # will succeed even without root privileges

    # Note: Assumes that this file is under aft/devices, and that the shell
    # script is under aft/tools
    interface_script = os.path.join(os.path.dirname(__file__), os.path.pardir,
                                    "tools", "interface_script.sh")
    subprocess32.check_call(["sudo", interface_script, interface, "up"])
    subprocess32.check_call(["sudo", interface_script, interface, ip_subnet])

def attach(self):
    if self.attached:
        raise Exception("Wrong usage. Ramdisk should not be attached")
    if not check_is_same_device_as_root_fs(self.folder):
        msg = ('Folder must be on / '
               '(ROOT) and must not be a different device (possibly '
               'already a RAMDISK)! Maybe try "umount {0}"?').format(self.folder)
        raise argparse.ArgumentTypeError(msg)
    create_ramdisk_stdout = subprocess.check_output(
        ['hdid', '-nomount', 'ram://{0}'.format(self.size)])
    ram_disk_device = create_ramdisk_stdout.strip().strip('\n')
    check_is_not_normal_harddrive(ram_disk_device)
    logger.info('Created RAM disk {0}'.format(ram_disk_device))
    logger.info('Formatting RAM disk...')
    format_stdout = subprocess.check_output(['newfs_hfs', ram_disk_device])
    # Initialized /dev/rdisk13 as a 512 MB HFS Plus volume
    assert format_stdout, format_stdout
    old_ionode_nbr = os.stat(self.folder).st_ino
    logger.info('Mounting RAM disk {0} as {1}'.format(ram_disk_device, self.folder))
    subprocess.check_call(['mount', '-t', 'hfs', ram_disk_device, self.folder])
    assert old_ionode_nbr != os.stat(self.folder).st_ino  # TODO: probably remove this
    assert not check_is_same_device_as_root_fs(self.folder)
    self.ram_disk_device = ram_disk_device
    self.attached = True

def _autodetect_unzip_command():
    unzip_cmd = None
    unzip_output = None
    try:
        unzip_output = subprocess.check_output(["unzip", "-v"])
        unzip_cmd = "unzip -q $ARCHIVE"
    except subprocess.CalledProcessError as e:
        pass

    # On MacOS Yosemite, unzip does not support Zip64, but ditto is available.
    # See: https://github.com/jlevy/instaclone/issues/1
    if not unzip_cmd or not unzip_output or unzip_output.find("ZIP64_SUPPORT") < 0:
        log.debug("did not find 'unzip' with Zip64 support; trying ditto")
        try:
            # ditto has no simple flag to check its version and exit with 0 status code.
            subprocess.check_call(["ditto", "-c", "/dev/null", tempfile.mktemp()])
            unzip_cmd = "ditto -x -k $ARCHIVE ."
        except subprocess.CalledProcessError as e:
            log.debug("did not find ditto")

    if not unzip_cmd:
        raise ArchiveError("Archive handling requires 'unzip' or 'ditto' in path")

    log.debug("unzip command: %s", unzip_cmd)
    return unzip_cmd

def stop_dev_server():
    try:
        subprocess.check_call(SH_KILL_DEV, shell=True)
        subprocess.call(SH_KILL_SELF, shell=True)
        click.secho("[Done]", fg="green")
    except subprocess.CalledProcessError as e:
        click.secho("[Failed]", fg="red")

def _got_response(self, queue):
    server_list, unsatisfiable_jobs = self._calculator.servers_for_queue(queue)
    # Cancel any job/container with unsatisfiable requirements, emitting
    # a log explaining why.
    for job_uuid, reason in unsatisfiable_jobs.iteritems():
        try:
            self._client.logs().create(
                body={
                    'object_uuid': job_uuid,
                    'event_type': 'stderr',
                    'properties': {
                        'text': reason
                    },
                }).execute()
            # Cancel the job depending on its type
            if arvados.util.container_uuid_pattern.match(job_uuid):
                subprocess.check_call(['scancel', '--name=' + job_uuid])
            elif arvados.util.job_uuid_pattern.match(job_uuid):
                self._client.jobs().cancel(uuid=job_uuid).execute()
            else:
                raise Exception('Unknown job type')
            self._logger.debug("Cancelled unsatisfiable job '%s'", job_uuid)
        except Exception as error:
            self._logger.error("Trying to cancel job '%s': %s", job_uuid, error)
    self._logger.debug("Calculated wishlist: %s",
                       ', '.join(s.id for s in server_list) or "(empty)")
    return super(JobQueueMonitorActor, self)._got_response(server_list)

def _autodetect_unzip_command():
    unzip_cmd = None
    unzip_output = None
    try:
        unzip_output = subprocess.check_output(["unzip", "-v"])
        unzip_cmd = "unzip -q $ARCHIVE"
    except subprocess.CalledProcessError as e:
        pass

    # On MacOS Yosemite, unzip does not support Zip64, but ditto is available.
    # See: https://github.com/vivlabs/instaclone/issues/1
    if not unzip_cmd or not unzip_output or unzip_output.find("ZIP64_SUPPORT") < 0:
        log.debug("did not find 'unzip' with Zip64 support; trying ditto")
        try:
            # ditto has no simple flag to check its version and exit with 0 status code.
            subprocess.check_call(["ditto", "-c", "/dev/null", tempfile.mktemp()])
            unzip_cmd = "ditto -x -k $ARCHIVE ."
        except subprocess.CalledProcessError as e:
            log.debug("did not find ditto")

    if not unzip_cmd:
        raise ArchiveError("Archive handling requires 'unzip' or 'ditto' in path")

    log.debug("unzip command: %s", unzip_cmd)
    return unzip_cmd

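# A minimal usage sketch for _autodetect_unzip_command above; extract_archive,
# archive_path and target_dir are assumed names, not from the original code.
# The returned command is a template containing an $ARCHIVE placeholder, so one
# plausible caller expands it with string.Template and runs it inside the
# extraction directory (the ditto variant extracts into the current directory).
import shlex
import subprocess
from string import Template

def extract_archive(archive_path, target_dir):
    cmd = Template(_autodetect_unzip_command()).substitute(ARCHIVE=archive_path)
    subprocess.check_call(shlex.split(cmd), cwd=target_dir)
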
def validate(sequence):
    s["PythonCommand"]["sequence"].setValue(sequence)
    s["fileName"].setValue(self.__scriptFileName)
    s.context().setFrame(10)
    s.save()

    subprocess.check_call([
        "gaffer", "execute", self.__scriptFileName,
        "-frames", "5",
        "-nodes", "PythonCommand"
    ])

    self.assertTrue(
        os.path.exists(self.temporaryDirectory() + "/canSerialiseFrameDependentPlug.gfr"))

    ss = Gaffer.ScriptNode()
    ss["fileName"].setValue(self.temporaryDirectory() + "/canSerialiseFrameDependentPlug.gfr")
    ss.load()

    # we must retain the non-substituted value
    self.assertEqual(
        ss["t"]["fileName"].getValue(),
        "{}/test.####.txt".format(self.temporaryDirectory()))

def ensure_latest_version():
    other_location = check_other_version()
    if not other_location:
        return
    dropbox_exe = os.path.join(other_location, u'dropboxd').encode(sys.getfilesystemencoding())
    cmd = [dropbox_exe, '/newerversion'] + sys.argv[1:]
    subprocess.check_call(cmd, close_fds=True, cwd=u'/')
    raise Exception('The newer version exited without killing us')

def up_node3():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node3'])
    subproc.check_call(['vagrant', 'up', 'node3'])
    yield "node3 is ready"
    print("Destroying node3...")
    subproc.call(['vagrant', 'destroy', '-f', 'node3'])
    print("Node3 is destroyed.")

def up_node1():
    subproc.check_call(['vagrant', 'destroy', '-f', 'node1'])
    subproc.check_call(['vagrant', 'up', 'node1', '--no-provision'])
    yield "node1 is ready"
    print("Destroying node1...")
    subproc.call(['vagrant', 'destroy', '-f', 'node1'])
    print("Node1 is destroyed.")

def compile_runner():
    # where is it?
    cwd = os.path.dirname(os.path.realpath(__file__))
    run_path = os.path.join(cwd, "benchmarks/run.rkt")

    # compile it
    print "compiling benchmark runner..."
    subprocess.check_call(["raco", "make", run_path])

def ensure_latest_version():
    alt_path = check_other_version()
    if not alt_path:
        return
    launch_exe = os.path.join(alt_path, u'Contents', u'MacOS', BUILD_KEY)
    launch_args = [launch_exe, u'/newerversion'] + os.sys.argv[2:]
    TRACE('launching newer version from alt path. Command: %s', launch_args)
    subprocess.check_call(launch_args, close_fds=True)
    raise Exception('The newer version exited without killing us')

def build_benchmark(self, CC, CFLAGS, LD):
    os.chdir(self.benchmark_path)
    if "Makefile" in os.listdir(os.getcwd()):
        subprocess.check_call(["make", "clean"])
    subprocess.check_call(["./bootstrap"])
    subprocess.check_call(["automake"])
    subprocess.check_call(["./configure", "CC=" + CC, "LD=" + LD, "CFLAGS=" + CFLAGS])
    subprocess.check_call(["make"], stderr=subprocess.STDOUT)

def set_veth_mac(veth_name_host, mac):
    """
    Set the veth MAC address.
    :param veth_name_host: The name of the veth.
    :param mac: The MAC address.
    :return: None. Raises CalledProcessError on error.
    """
    # TODO MAC should be an EUI object.
    check_call(["ip", "link", "set", "dev", veth_name_host, "address", mac],
               timeout=IP_CMD_TIMEOUT)

def run_aviz(aviz, timeout):
    """ run aviz with a file """
    try:
        subprocess.check_call(aviz, timeout=timeout)
    except subprocess.TimeoutExpired:
        # if timeout occurs then everything
        # is okay (i.e. aviz did not crash)
        pass

def export(server, id, filepath):
    """Saves a SQL dump of the database to the given file.

    Also creates <filename>.version and <filename>.md5sum files in the same
    directory.
    """
    directory = os.path.dirname(filepath)
    base_path = '.'.join(filepath.split('.')[:-1])  # remove extension from filename
    version_filename = '{}.version'.format(base_path)
    checksum_filename = '{}.md5sum'.format(base_path)

    with closing(server.get_connection()) as cnx:
        if not pham.query.database_exists(cnx, id):
            raise DatabaseDoesNotExistError('No such database: {}'.format(id))

    if os.path.exists(filepath):
        raise IOError('File already exists: {}'.format(filepath))
    if os.path.exists(version_filename):
        raise IOError('File already exists: {}'.format(version_filename))
    if os.path.exists(checksum_filename):
        raise IOError('File already exists: {}'.format(checksum_filename))

    if directory != '' and not os.path.exists(directory):
        os.makedirs(directory)

    # export database to sql file using the mysqldump command line program
    host, user, password = server.get_credentials()
    command = ['mysqldump', '--host', host, '--user', user]
    if password != '' and password is not None:
        command += ['--password', password]
    command.append(id)

    with open(filepath, 'w') as output_file:
        with open(os.devnull, 'wb') as DEVNULL:
            subprocess32.check_call(command, stdout=output_file, stderr=DEVNULL)

    # write .version file
    with closing(server.get_connection(database=id)) as cnx:
        version_number = pham.query.version_number(cnx)
    with open(version_filename, 'w') as out_file:
        out_file.write('{}\n'.format(version_number))

    # calculate checksum
    m = hashlib.md5()
    with open(filepath, 'rb') as sql_file:
        while True:
            data = sql_file.read(8192)
            if data == '':
                break
            m.update(data)

    # write .md5sum file
    checksum = m.hexdigest()
    with open(checksum_filename, 'w') as out_file:
        out_file.write('{} {}\n'.format(checksum, filepath))

def callback(ch, method, properties, body):
    msg = json.loads(body.decode('utf-8'))
    task_id = msg['id']
    mbtiles_file = task_id + '.mbtiles'

    source = 'tmsource://' + os.path.abspath(tm2source)
    sink = 'mbtiles://' + os.path.abspath(mbtiles_file)

    tilelive_cmd = []
    if msg['type'] == 'pyramid':
        pyramid = msg['pyramid']
        tileinfo = pyramid['tile']
        tilelive_cmd = render_pyramid_command(
            source, sink,
            bounds=create_tilelive_bbox(pyramid['bounds']),
            min_zoom=tileinfo['min_zoom'],
            max_zoom=tileinfo['max_zoom']
        )
    elif msg['type'] == 'list':
        list_file = '/tmp/tiles.txt'
        with open(list_file, 'w') as fh:
            write_list_file(fh)
        tilelive_cmd = render_tile_list_command(
            source, sink,
            list_file=list_file,
        )
    else:
        raise ValueError("Message must be either of type pyramid or list")

    try:
        start = time.time()
        subprocess.check_call(tilelive_cmd, timeout=5*60)
        end = time.time()
        print('Rendering time: {}'.format(humanize.naturaltime(end - start)))

        print('Optimize MBTiles file size')
        optimize_mbtiles(mbtiles_file)

        upload_mbtiles(bucket, mbtiles_file)
        os.remove(mbtiles_file)
        print('Upload mbtiles {}'.format(mbtiles_file))

        download_link = s3_url(host, port, bucket_name, mbtiles_file)
        result_msg = create_result_message(task_id, download_link, msg)
        durable_publish(channel, result_queue_name, body=json.dumps(result_msg))
        channel.basic_ack(delivery_tag=method.delivery_tag)
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as e:
        durable_publish(channel, failed_queue_name, body=body)
        channel.basic_ack(delivery_tag=method.delivery_tag)
        channel.stop_consuming()
        time.sleep(5)  # Give RabbitMQ some time
        raise e

def check_call(self, command):
    """
    Run a command within the named namespace.

    :param command: The command to run.

    The command is executed with a timeout of IP_CMD_TIMEOUT seconds.
    """
    command = self._get_nets_command(command)
    _log.debug("Run command: %s", command)
    check_call(command, timeout=IP_CMD_TIMEOUT)

def install_python_dependencies(python_dependencies, upgrade=False):
    if not python_dependencies:
        return
    command_terms = ['pip', 'install']
    if upgrade:
        command_terms.append('-U')
    try:
        subprocess.check_call(command_terms + python_dependencies)
    except subprocess.CalledProcessError:
        raise DependencyError('Dependencies not installed (%s).' %
                              ', '.join(python_dependencies))

def compileShader(self, sourceFileName):
    outputFileName = self.temporaryDirectory() + "/" + \
        os.path.splitext(os.path.basename(sourceFileName))[0] + ".oso"

    subprocess.check_call(
        ["oslc", "-q"] +
        ["-I" + p for p in os.environ.get("OSL_SHADER_PATHS", "").split(":")] +
        ["-o", outputFileName, sourceFileName]
    )

    return os.path.splitext(outputFileName)[0]

def add_node(bootstrap, up_node2, up_node3):
    subproc.check_call([
        'vagrant', 'ssh', 'node1', '-c',
        'cd /vagrant/bootstrap_test && sudo ansible-playbook '
        '-i host_vars/test-nodes distribute_ssh_key.yaml'
    ])
    subproc.check_call([
        'vagrant', 'ssh', 'node1', '-c',
        'sudo lainctl node add -p /vagrant/playbooks node2:192.168.77.22 ' +
        'node3:192.168.77.23'
    ])

def make_iso(directory, iso_path):
    with open(os.devnull, "w") as DEVNULL:
        if sys.platform.startswith('linux'):
            subprocess32.check_call([
                'genisoimage', '-RJ', '-max-iso9660-filenames',
                '-o', iso_path, directory
            ], stderr=STDOUT if debug else DEVNULL)
        elif sys.platform == 'darwin':
            subprocess32.check_call([
                'hdiutil', 'makehybrid', '-hfs', '-joliet', '-iso',
                '-o', iso_path, directory
            ], stderr=STDOUT if debug else DEVNULL)
        else:
            raise NotImplementedError("Unsupported operating system!")