def run_bjail(args: Namespace) -> None:
    instance_name = args.instance_name
    try:
        run_instace(
            instance_name,
            args.args_to_instance,
            args.wait,
            args.dry_run,
            args.debug_bwrap_args,
            args.debug_shell,
            args.debug_log_dbus,
            args.debug_helper_script,
        )
    except Exception:
        from os import isatty
        from sys import stderr

        if not isatty(stderr.fileno()):
            from subprocess import run as subprocess_run
            from traceback import format_exc
            try:
                subprocess_run((
                    'notify-send',
                    '--urgency', 'critical',
                    '--icon', 'bubblejail-config',
                    f"Failed to run instance: {instance_name}",
                    f"Exception: {format_exc(0)}",
                ))
            except FileNotFoundError:
                # Make notify-send optional
                ...

        raise
def run(self, command, *args, **kws):
    if 'mkfs.ext4' in command:
        if '-d' in command.split():
            # Simulate a failing call on <= Ubuntu 16.04 where mkfs.ext4
            # doesn't yet support the -d option.
            return SimpleNamespace(returncode=1)
        # Otherwise, pretend to have created an ext4 file system.
        pass
    elif command.startswith('sudo mount'):
        # We don't want to require sudo for the test suite, so let's not
        # actually do the mount.  Instead, just record the mount point,
        # which will be a temporary directory, so that we can verify its
        # contents later.
        self.mountpoint = command.split()[-1]
    elif command.startswith('sudo umount'):
        # Just ignore the umount command since we never mounted anything,
        # and it's a temporary directory anyway.
        pass
    elif command.startswith('sudo cp'):
        # Pass this command upward, but without the sudo.
        subprocess_run(command[5:], *args, **kws)
        # Now, because mount() called from mkfs_ext4() will cull its own
        # temporary directory, and that tempdir is the mountpoint captured
        # above, copy the entire contents of the mount point directory to
        # a results tempdir that we can check below for a passing grade.
        copytree(self.mountpoint, self.results_dir)
def run(self, command, *args, **kws):
    if 'mkfs.ext4' in command:
        if '-d' in command.split():
            # Simulate a failing call on <= Ubuntu 16.04 where mkfs.ext4
            # doesn't yet support the -d option.
            return SimpleNamespace(returncode=1)
        # Otherwise, pretend to have created an ext4 file system.
        pass
    elif command.startswith('sudo mount'):
        # We don't want to require sudo for the test suite, so let's not
        # actually do the mount.  Instead, just record the mount point,
        # which will be a temporary directory, so that we can verify its
        # contents later.
        self.mountpoint = command.split()[-1]
    elif command.startswith('sudo umount'):
        # Just ignore the umount command since we never mounted anything,
        # and it's a temporary directory anyway.
        pass
    elif command.startswith('sudo cp'):
        # Pass this command upward, but without the sudo.
        subprocess_run(command[5:], *args, **kws)
        # Now, because mount() called from mkfs_ext4() will cull its own
        # temporary directory, and that tempdir is the mountpoint captured
        # above, copy the entire contents of the mount point directory to
        # a results tempdir that we can check below for a passing grade.
        copytree(self.mountpoint, self.results_dir)
        # We also want to somehow test if, when requested, the cp call has
        # the --preserve=ownership flag present.  There's no other nice
        # way of mocking this as everything else would require root.
        if re.search(r'--preserve=[^ ]*ownership', command):
            self.preserves_ownership = True
def generate(self, project):
    LOG.debug("Generate started")

    root = project.root / "cmd"  # project folder structure
    src = root / "resource"
    format_paths = []

    LOG.debug("Writing Types")
    models = resolve_models(project.schema)
    template = self.env.get_template("types.go.tple")
    path = src / "{}.go".format("model")
    contents = template.render(models=models)
    project.overwrite(path, contents)
    format_paths.append(path)

    path = root / "main.go"
    LOG.debug("Writing project: %s", path)
    template = self.env.get_template("main.go.tple")
    importpath = Path(project.settings["importpath"])
    contents = template.render(path=importpath / "cmd" / "resource")
    project.overwrite(path, contents)
    format_paths.append(path)

    # named files must all be in one directory
    for path in format_paths:
        try:
            subprocess_run(["go", "fmt", path], cwd=root, check=True)
        except (FileNotFoundError, CalledProcessError) as e:
            raise DownstreamError("go fmt failed") from e
def generate(self, project):
    LOG.debug("Generate started")

    root = project.root / "cmd"  # project folder structure
    src = root / "resource"
    format_paths = []

    LOG.debug("Writing Types")
    models = resolve_models(project.schema)
    template = self.env.get_template("types.go.tple")
    path = src / "{}.go".format("model")
    contents = template.render(models=models)
    project.overwrite(path, contents)
    format_paths.append(path)

    path = root / "main.go"
    LOG.debug("Writing project: %s", path)
    template = self.env.get_template("main.go.tple")
    importpath = Path(project.settings["importpath"])
    contents = template.render(path=importpath / "cmd" / "resource")
    project.overwrite(path, contents)
    format_paths.append(path)

    # Makefile
    path = project.root / "Makefile"
    LOG.debug("Writing Makefile: %s", path)
    template = self.env.get_template("Makefile")
    contents = template.render()
    project.overwrite(path, contents)

    # named files must all be in one directory
    for path in format_paths:
        try:
            subprocess_run(
                ["go", "fmt", path], cwd=root, check=True, capture_output=True
            )
        except (FileNotFoundError, CalledProcessError) as e:
            raise DownstreamError("go fmt failed") from e

    # Update settings as needed
    need_to_write = False
    for key, new in DEFAULT_SETTINGS.items():
        old = project.settings.get(key)
        if project.settings.get(key) != new:
            LOG.debug(f"{key} version change from {old} to {new}")
            project.settings[key] = new
            need_to_write = True
            if key == "pluginVersion":
                # Display any upgrade messages
                print(*check_version(old), sep="\n")

    if need_to_write:
        project.write_settings()
def __call__(self, generated: pathlib.Path) -> pathlib.Path:
    run_args = self._command_line + [str(generated)]

    # If a python file is passed in we prepend the python executable currently in-use
    # to reduce inconsistencies between the environment nunavut is running in and
    # the default environment for a given system.
    if len(run_args) > 0 and str(run_args[0]).endswith(".py"):
        run_args = [sys.executable] + run_args

    subprocess_run(run_args, check=self._check)
    return generated
def create_policy(policy_name, policy_path):
    """
    create policy
    """
    policy_script = policy_path + 'policy.script'
    policy_vkey = policy_path + 'policy.vkey'
    policy_skey = policy_path + 'policy.skey'
    policy = {}
    policy['policy_script'] = policy_script
    policy['policy_vkey'] = policy_vkey
    policy['policy_skey'] = policy_skey
    # check if policy script exists
    # if so, returns existing policy
    if path.exists(policy_script):
        print("Policy exists : no policy created for", policy_name)
        policy_id = subprocess_run([
            'cardano-cli', 'transaction', 'policyid',
            '--script-file', policy_script
        ], capture_output=True)
        policy['policy_id'] = policy_id.stdout.decode().replace('\n', '')
        return policy

    makedirs(policy_path, mode=0o777, exist_ok=True)
    rc = subprocess_run([
        'cardano-cli', 'address', 'key-gen',
        '--verification-key-file', policy_vkey,
        '--signing-key-file', policy_skey
    ], capture_output=False)

    # create policy script
    keyhash = subprocess_run([
        'cardano-cli', 'address', 'key-hash',
        '--payment-verification-key-file', policy_vkey
    ], capture_output=True, text=True)
    data = {}
    data['keyHash'] = keyhash.stdout.replace('\n', '')
    data['type'] = 'sig'
    with open(policy_script, 'w') as outfile:
        json_dump(data, outfile)

    # get policy id
    policy_id = subprocess_run([
        'cardano-cli', 'transaction', 'policyid',
        '--script-file', policy_script
    ], capture_output=True, text=True)
    policy['policy_id'] = policy_id.stdout.replace('\n', '')
    return policy
def run(*args, **kwargs):
    '''Wrapper for run, to show the commands being run.'''
    show_command = kwargs.get('show_command', None)
    if show_command is not None:
        if show_command:
            print(' '.join(args[0]))
        del kwargs['show_command']
    subprocess_run(*args, **kwargs)
def get_protocol_parameters(network, protparams_file):
    """
    get protocol parameters
    """
    network_name = network['network']
    network_magic = str(network['network_magic'])
    network_era = network['network_era']
    env_param = network['env']
    subprocess_run([
        'cardano-cli', 'query', 'protocol-parameters',
        network_name, network_magic, network_era,
        '--out-file', protparams_file
    ], capture_output=False, text=True, env=env_param)
    return
def run_task(self, task: Task) -> None:
    """Run the python file defined by Task.loc in the environment defined by Task.env.

    Args:
        task (`Task`): The task to be run.
    """
    assert hash(task) in [hash(t) for t in self.dag.tasks], \
        ValueError(f'{task} is not in the dag')
    print(f'\nRunning {repr(task)}\n', flush=True)
    subprocess_run(f'{task.env} {task.loc}', shell=True, check=True)
def send_email(headline, result, error_str=None):
    body = (headline +
            f"Duration: {timer.timedelta}\n"
            f"Execution directory: {execution_dir}\n")

    if jobname:
        body += f"Job name: {jobname}\n"

    if error_str:
        body += f"Error: {error_str}\n"

    if email_config["include-log"]:
        # Sync first, in the hope that the log will flush to disk before we read it.
        # Note:
        #   Currently raised exceptions haven't been printed yet,
        #   so they aren't yet in the log file in your email.
        #   They'll only be present in the on-disk logfile.
        try:
            # This can hang, apparently.
            # Hangs like this might be fairly damaging, unfortunately.
            # According to Ken:
            #   >If sync is trying to write a file down to disk that was deleted,
            #   >it can hang like that. Unfortunately, the node will have to be
            #   >power cycled to deal with this situation.
            #
            # Let's hope that's not common.
            # We'll just timeout the ordinary way and hope for the best.
            subprocess_run("sync", timeout=10.0)
            time.sleep(2.0)
        except TimeoutExpired:
            logger.warning("Timed out while waiting for filesystem sync")

        body += "\nLOG (possibly truncated):\n\n"
        with open(f'{logpath}', 'r') as log:
            body += log.read()

    msg = MIMEText(body)
    msg['Subject'] = f'Workflow exited: {result}'
    msg['From'] = f'flyemflows <{user}@{host}>'
    msg['To'] = ','.join(addresses)

    try:
        s = smtplib.SMTP('mail.hhmi.org')
        s.sendmail(msg['From'], addresses, msg.as_string())
        s.quit()
    except:
        msg = ("Failed to send completion email. Perhaps your machine "
               "is not configured to send login-less email, which is required for this feature.")
        logger.error(msg)
def query_scrt_swap(nonce: int, contract_addr: str) -> str:
    query_str = swap_json(nonce)
    cmd = ['secretcli', 'query', 'compute', 'query', contract_addr, f"{query_str}"]
    p = subprocess_run(cmd, stdout=PIPE, stderr=PIPE, check=True)
    return p.stdout.decode()
def run(command, sudo=False, cwd=None, capture=True, quiet=False, stdin=None, allow_error=False):
    """
    :param command: The command to run.
    :param sudo: Whether to execute the command using sudo(1) or not.
    :param cwd: Working directory.
    :param capture: Whether to capture stdout and stderr or not.
    :param quiet: Do not log the command.
    :param stdin: Input string.
    :param allow_error: Whether to allow error or not.
    :return: The captured standard output.
    """
    prefix = ['sudo'] if sudo else []
    cmd = prefix + command
    if not quiet:
        log(LogLevel.fine, ' '.join(cmd))
    file = PIPE if capture else None
    try:
        completed = subprocess_run(cmd, cwd=cwd, stdout=file, stderr=file,
                                   check=True, encoding='utf-8', input=stdin)
        if capture:
            return completed.stdout
        else:
            return None
    except CalledProcessError as e:
        if allow_error:
            return None
        log(LogLevel.error, 'Error while running: {}', ' '.join(cmd))
        log(LogLevel.error, 'Working directory: {}', cwd)
        log(LogLevel.error, 'Return code: {}', e.returncode)
        if capture:
            log(LogLevel.error, e.stderr)
        raise e
def _build(self, base_path):
    LOG.debug("Dependencies build started from '%s'", base_path)

    # TODO: We should use the build logic from SAM CLI library, instead:
    # https://github.com/awslabs/aws-sam-cli/blob/master/samcli/lib/build/app_builder.py
    command = self._make_build_command(base_path, self._build_command)
    if self._use_docker:
        command = command + " --use-container"
    command = command + " " + MAIN_HANDLER_FUNCTION
    LOG.debug("command is '%s'", command)

    LOG.warning("Starting build.")
    try:
        completed_proc = subprocess_run(  # nosec
            ["/bin/bash", "-c", command],
            stdout=PIPE,
            stderr=PIPE,
            cwd=base_path,
            check=True,
        )
    except (FileNotFoundError, CalledProcessError) as e:
        raise DownstreamError("local build failed") from e

    LOG.debug("--- build stdout:\n%s", completed_proc.stdout)
    LOG.debug("--- build stderr:\n%s", completed_proc.stderr)
    LOG.debug("Dependencies build finished")
def perform_test(test_config: TestConfig) -> Optional[UnitTestCorrectionResult]:
    test_id: int = test_config['id']

    file_to_test_path: Path = cwd / folder_name / f"{file_name}_{test_id}.py"
    if not file_to_test_path.exists():
        print(f"File {file_to_test_path} does not exist!", file=stderr)
        return None

    test_file_path: Path = cwd / folder_name / f"{test_file_name}_{test_id}.py"
    test_file_path.write_text(
        test_file_content.replace(
            f"from {file_name} import",
            f"from {str(file_to_test_path.name)[:-3]} import",
        )
    )

    completed_process: CompletedProcess = subprocess_run(
        f"(cd {folder_name} && timeout 2 python -m unittest {test_file_path.name})",
        capture_output=True,
        shell=True,
        text=True,
    )

    return UnitTestCorrectionResult(
        testId=test_id,
        description=test_config['description'],
        shouldFail=test_config['shouldFail'],
        testSuccessful=completed_process.returncode == 0,
        stdout=completed_process.stdout[:10_000].split("\n")[:50],
        stderr=completed_process.stderr[:10_000].split("\n")[:50],
    )
def get_policy(policy_name, policy_path):
    """
    get policy
    """
    if policy_name is None:
        return {}
    policy_script = policy_path + 'policy.script'
    policy_vkey = policy_path + 'policy.vkey'
    policy_skey = policy_path + 'policy.skey'
    policy = {}
    policy['policy_script'] = policy_script
    policy['policy_vkey'] = policy_vkey
    policy['policy_skey'] = policy_skey
    # check if policy script exists
    # if so, returns existing policy
    if path.exists(policy_script):
        run_params = [
            'cardano-cli', 'transaction', 'policyid',
            '--script-file', policy_script
        ]
        policy_id = subprocess_run(run_params, capture_output=True)
        policy['policy_id'] = policy_id.stdout.decode().replace('\n', '')
        return policy
    else:
        return {}
def run(args, shell=False, check=True):
    token = os.environ.get("GH_AUTH").encode('utf-8')
    if not shell:
        command = ' '.join(map(shlex.quote, args))
    else:
        command = args
    command = command.replace(token.decode('utf-8'), '~'*len(token))
    print(command)
    sys.stdout.flush()

    if token:
        stdout = stderr = PIPE
    else:
        stdout = stderr = None

    p = subprocess_run(args, stdout=stdout, stderr=stderr, shell=shell, check=check)

    if token:
        # XXX: Do this in a way that is streaming
        out, err = p.stdout, p.stderr
        out = out.replace(token, b"~"*len(token))
        err = err.replace(token, b"~"*len(token))

        if out:
            print(out.decode('utf-8'))
        if err:
            print(err.decode('utf-8'), file=sys.stderr)

    sys.stdout.flush()
    sys.stderr.flush()
    return p.returncode
def test_pvacseq_commands(self):
    pvac_script_path = os.path.join(
        self.pVac_directory,
        'tools',
        'pvacseq',
        "main.py"
    )
    usage_search = re.compile(r"usage: ")
    for command in [
        "binding_filter",
        "coverage_filter",
        "run",
        "generate_protein_fasta",
        "install_vep_plugin",
        "download_example_data",
        "valid_alleles",
        "config_files",
    ]:
        result = subprocess_run([
            sys.executable,
            pvac_script_path,
            command,
            '-h'
        ], shell=False, stdout=PIPE)
        self.assertFalse(result.returncode)
        self.assertRegex(result.stdout.decode(), usage_search)
def _pip_build(cls, base_path, output_path):
    tmp_path = Path(tempfile.mkdtemp())
    try:
        build_path = tmp_path / "build"
        shutil.copytree(base_path, build_path)
        command = cls._make_pip_command(build_path)
        LOG.debug("command is '%s'", command)

        LOG.info("Starting pip build.")
        try:
            completed_proc = subprocess_run(  # nosec
                command, cwd=build_path, check=True, stdout=PIPE, stderr=PIPE
            )
        except (FileNotFoundError, CalledProcessError) as e:
            raise TaskCatException("pip build failed") from e

        LOG.debug("--- pip stdout:\n%s", completed_proc.stdout)
        LOG.debug("--- pip stderr:\n%s", completed_proc.stderr)
        cls._zip_dir(build_path, output_path)
        shutil.rmtree(tmp_path, ignore_errors=True)
    except Exception as e:  # pylint: disable=broad-except
        shutil.rmtree(tmp_path, ignore_errors=True)
        raise e
def get_utxo_from_wallet(network, address):
    """
    get utxo from wallet
    """
    if address is None:
        print("address empty")
        return
    network_name = network['network']
    network_magic = str(network['network_magic'])
    network_era = network['network_era']
    env_param = network['env']
    utxo = {}
    tx_disp = subprocess_run([
        'cardano-cli', 'query', 'utxo',
        network_name, network_magic, network_era,
        '--address', address
    ], capture_output=True, text=True, env=env_param)
    tx_list = tx_disp.stdout.split('\n')
    utxo['raw'] = tx_list
    tx_list = [tx.split() for tx in tx_list[2:] if tx != ""]
    t_list = [["--tx-in", tx[0] + '#' + tx[1]] for tx in tx_list]
    # flatten list
    utxo['in_utxo'] = [y for x in t_list for y in x]
    utxo['count_utxo'] = len(tx_list)
    tx_list = [tx[2:] for tx in tx_list]
    tx_list = [split_list(tx) for tx in tx_list]
    # flatten list
    t_list = [y for x in tx_list for y in x]
    utxo['tokens'] = [(token[1], int(token[0])) for token in t_list]
    return utxo
def getGitRemote(self, localBranch: str = None) -> str:
    if localBranch is None:
        localBranch = self.getGitLocalBranch()

    try:
        command = "git"
        arguments = ("config", "branch.{localBranch}.remote".format(localBranch=localBranch))
        completed = subprocess_run((command, *arguments), stdout=PIPE, stderr=PIPE)
    except:
        raise Exception

    if completed.returncode == 0:
        return completed.stdout.decode('utf-8').split("\n")[0]
    elif completed.returncode == 1:
        self.WriteWarning("Branch '{localBranch}' is not pushed to a remote.".format(localBranch=localBranch))
        return "(local) {localBranch}".format(localBranch=localBranch)
    else:
        message = completed.stderr.decode('utf-8')
        self.WriteFatal("Message from '{command}': {message}".format(command=command, message=message))
        raise Exception
def query_scrt_swap(nonce: int, scrt_swap_address: str, token: str) -> str:
    query_str = swap_json(nonce, token)
    cmd = ['secretcli', 'query', 'compute', 'query', scrt_swap_address, f"{query_str}"]
    p = subprocess_run(cmd, stdout=PIPE, stderr=PIPE, check=True)
    return p.stdout.decode()
def checksum_git(s):
    cksum_command = "cd ~/morbidostat/ && git rev-parse HEAD"
    (stdin, stdout, stderr) = s.exec_command(cksum_command)
    checksum_worker = stdout.readlines()[0].strip()
    checksum_leader = subprocess_run(
        cksum_command, shell=True, capture_output=True, universal_newlines=True
    ).stdout.strip()
    assert (
        checksum_worker == checksum_leader
    ), f"checksum on git failed, {checksum_worker}, {checksum_leader}. Update leader, then try running `mba sync`"
def calculate_ttl(network):
    FORWARD_SLOT = 300
    run_params = [
        'cardano-cli', 'query', 'tip',
        network['network'], str(network['network_magic'])
    ]
    tip = subprocess_run(run_params, capture_output=True, text=True)
    slot = int(json_loads(tip.stdout).get('slotNo')) + FORWARD_SLOT
    return slot
def run_command(command: str, args: List[str]) -> str:
    try:
        return subprocess_run(
            [command] + (args if args else []),
            shell=True,
            capture_output=True,
            text=True,
            check=True,
        ).stdout
    except CalledProcessError as e:
        if e.returncode != 3010:
            raise
def sign_send_transaction(network, skey, ok_fee_file, sign_file):
    """
    sign transfer transaction
    """
    network_name = network['network']
    network_magic = str(network['network_magic'])
    rc = subprocess_run([
        'cardano-cli', 'transaction', 'sign',
        network_name, network_magic,
        '--signing-key-file', skey,
        '--tx-body-file', ok_fee_file,
        '--out-file', sign_file
    ], capture_output=False, text=True)
    return
def sign_mint_transaction(network, skey, policy, ok_fee_file, sign_file):
    """
    sign mint transaction
    """
    network_name = network['network']
    network_magic = str(network['network_magic'])
    rc = subprocess_run([
        'cardano-cli', 'transaction', 'sign',
        network_name, network_magic,
        '--signing-key-file', skey,
        '--signing-key-file', policy['policy_skey'],
        '--script-file', policy['policy_script'],
        '--tx-body-file', ok_fee_file,
        '--out-file', sign_file
    ], capture_output=False, text=True)
    return
def create_keypair(address_type, addresses_path, address_prefix, name):
    """
    create keypair based on address_name
    """
    vkey_file = get_vkey_file(addresses_path, address_prefix, name)
    skey_file = get_skey_file(addresses_path, address_prefix, name)

    if path.exists(vkey_file):
        print(address_prefix, "key pair already exists for", name)
        return

    makedirs(path.dirname(vkey_file), mode=0o777, exist_ok=True)

    run_params = [
        'cardano-cli', address_type, 'key-gen',
        '--verification-key-file', vkey_file,
        '--signing-key-file', skey_file
    ]
    subprocess_run(run_params, capture_output=False, text=True)
    return
def run(co):
    print(co)
    return subprocess_run(co, shell=True, stdout=PIPE, stderr=PIPE,
                          check=True, universal_newlines=True)
def submit_transaction(network, sign_file):
    """
    submit signed transaction on the network
    """
    network_name = network['network']
    network_magic = str(network['network_magic'])
    env_param = network['env']
    rc = subprocess_run([
        'cardano-cli', 'transaction', 'submit',
        network_name, network_magic,
        '--tx-file', sign_file
    ], capture_output=False, text=True, env=env_param)
    return rc
def get_protocol_keydeposit(network):
    """
    get keyDeposit parameter from protocol
    """
    network_name = network['network']
    network_magic = str(network['network_magic'])
    network_era = network['network_era']
    env_param = network['env']
    rc = subprocess_run([
        'cardano-cli', 'query', 'protocol-parameters',
        network_name, network_magic, network_era
    ], capture_output=True, text=True, env=env_param)
    return int(json_loads(rc.stdout)['keyDeposit'])
def _process(self, cmd: str, msg: str) -> bool:
    journal.send(msg)
    sleep(self.CMD_PRE_SLEEP)
    proc = subprocess_run(cmd, shell=True, timeout=self.PROCESS_TIMEOUT)
    if proc.returncode != 0:
        journal.send(f"Subprocess for command '{cmd}' returned an error!")
        return False
    return True
def run(command, **args):
    if 'shell' not in args:
        command = command.split()
    proc = subprocess_run(
        command, stdout=PIPE, stderr=PIPE, universal_newlines=True, **args)
    if proc.returncode != 0:
        sys.stderr.write(proc.stdout)
        sys.stderr.write(proc.stderr)
        proc.check_returncode()
    return proc
def run(command, *, check=True, **args):
    runnable_command = (
        command.split()
        if isinstance(command, str) and 'shell' not in args
        else command)
    stdout = args.pop('stdout', PIPE)
    stderr = args.pop('stderr', PIPE)
    proc = subprocess_run(
        runnable_command,
        stdout=stdout,
        stderr=stderr,
        universal_newlines=True,
        **args)
    if check and proc.returncode != 0:
        _logger.error('COMMAND FAILED: %s', command)
        if proc.stdout is not None:
            _logger.error(proc.stdout)
        if proc.stderr is not None:
            _logger.error(proc.stderr)
        proc.check_returncode()
    return proc
def run(*args, env=None, **kwargs):
    tls_env = getattr(tls, 'env', {})
    if cfg_env or tls_env:
        new_env = {}
        if env is None:
            env = environ
        for k, v in env.items():
            new_env[fsencode(k)] = fsencode(v)
        new_env.update(cfg_env)
        for k, v in tls_env.items():
            k = fsencode(k)
            if k in new_env:
                continue
            try:
                v = fsencode(v)
            except:
                pass
            else:
                new_env[k] = v
        kwargs = dict(kwargs, env=new_env)
    return subprocess_run(*args, **kwargs)