Example #1
 def wait_check_process(Popen_object,
                        check_exitCode=True,
                        check_err=True,
                        timeout=None,
                        raise_error=False):
     try:
         exitCode = Popen_object.wait(timeout=timeout)
     except subprocess.TimeoutExpired:
         Popen_object.kill()
         Popen_object.wait()
         raise
     try:
         Popen_object.duration = time.time() - Popen_object.time_started
     except AttributeError:
         pass
     if check_exitCode and exitCode:
         msg = ("Command '{Popen_object.args}' returned non-zero exit status "
                "{exitCode} after duration {Popen_object.duration}."
                .format_map(locals()))
         if raise_error:
             raise subprocess.SubprocessError(msg)
         warn(msg)
     if check_err:
         err = Popen_object.stderr.read()
         if err:
             msg = ("Command '{Popen_object.args}' returned the following "
                    "stderr after duration {Popen_object.duration}:\n"
                    "{err}".format_map(locals()))
             if raise_error:
                 raise subprocess.SubprocessError(msg)
             warn(msg)
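
A minimal usage sketch (not from the original source): it assumes the Popen object was created with stderr=subprocess.PIPE and that the caller set a time_started attribute, since wait_check_process reads both.

import subprocess
import time

proc = subprocess.Popen(["ls", "no_such_dir"], stderr=subprocess.PIPE)
proc.time_started = time.time()       # consumed by the duration bookkeeping above
wait_check_process(proc, timeout=30)  # warns on non-zero exit or stderr output (raise_error=False by default)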
Example #2
 def run(self, args, **kwargs):
     logger.debug(f'{self.__class__.__name__} run command: {args} {kwargs}')
     shutils_kwargs = {
         k: v
         for k, v in kwargs.items()
         if k not in rpyc.core.protocol.DEFAULT_CONFIG
     }
     try:
         output = self.modules.bbtest.target.subprocess_run(
             args, **shutils_kwargs)
     except Exception as e:
         raise subprocess.SubprocessError(
             f'subprocess run "{args} {kwargs}" failed on target - {e}')
     if output.returncode > 0:
         raise subprocess.SubprocessError(
             f'subprocess run "{args} {kwargs}" failed on target\n'
             f'stdout = {output.stdout}\n'
             f'stderr = {output.stderr}')
     logger.debug(
         f'{self.__class__.__name__} run raw stdout: {output.stdout}')
     parsed_output = [] if output.stdout == b'' else output.stdout.decode(
         'utf-8').splitlines()
     logger.debug(
         f'{self.__class__.__name__} run parsed stdout: {parsed_output}')
     return parsed_output
Example #3
    def _test_bin(self):
        try:
            with subprocess.Popen(self._ap,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE) as proc:
                try:
                    self._bin_test_output, _ = proc.communicate(timeout=2)

                    retcode = proc.returncode
                    if retcode != 0:
                        raise subprocess.SubprocessError(
                            "Error running AtomicParsley: <code {}>".format(
                                retcode)) from None
                except subprocess.TimeoutExpired:
                    proc.kill()
                    proc.wait()
                    raise subprocess.SubprocessError(
                        "Error running AtomicParsley: <timeout>") from None

        except FileNotFoundError:
            raise lib_exceptions.APError(
                "AtomicParsley binary not found") from None

        self._tested = True
        return
Example #4
def check_sshfs_installation():
    """
    Utility function to check sshfs installation and user permission to execute sshfs.
    Raises errors with hints to install sshfs based on the platform
    """
    sshfs_executable = shutil.which('sshfs')
    if not sshfs_executable:
        if platform.system().lower() == 'darwin':
            raise subprocess.SubprocessError(
                'Could not find sshfs installation. \n'
                'To install sshfs please run the following commands:\n'
                'brew install macfuse \n'
                'brew install gromgit/fuse/sshfs\n'
                'or alternatively follow the instructions here: '
                'https://osxfuse.github.io/')
        else:
            raise subprocess.SubprocessError(
                'Could not find sshfs installation. \n'
                'To install sshfs please run the following commands:\n'
                'apt-get update\n'
                'apt-get install sshfs')

    # check user permission to execute sshfs
    if not os.access(sshfs_executable, os.X_OK):
        username = get_username()
        raise PermissionError(
            f'User {username} does not have sufficient permissions to run sshfs command.'
        )
Example #5
def sync(src: str, tar: str) -> None:
    assert src != "" and tar != ""

    outb = diff(src, tar)
    for f1, f2 in files_differ(outb):

        s = ""
        d = ""

        if f1.startswith(src) and f2.startswith(tar):
            s = f1
            d = f2
        elif f1.startswith(tar) and f2.startswith(src):
            s = f2
            d = f1
        else:
            raise subprocess.SubprocessError(f"unknown path: {f1}, {f2}")

        rm(d)
        cp(s, d)

    for fd, fs in only_in(outb):

        path = os.path.join(fd, fs)

        if path.startswith(src):
            s = path
            d = path.replace(src, tar)
            cp(s, d)
        elif path.startswith(tar):
            rm(path)
        else:
            raise subprocess.SubprocessError(f"unknown path: {path}")
Example #6
def copyToHdfs(fileName, hdfsDir, hdfsName):
    """Copy a file into an HDFS "table" or delete the directory; redirect the logs and check the result."""
    stdErr = open('hdfs_stderr.txt', 'w')
    stdOut = open('hdfs_stdout.txt', 'w')
    if hdfsName:  # if a file name is given, copy the file
        print("Copying", fileName, "into HDFS...")
        res = subprocess.run(["hdfs", "dfs", "-mkdir", "-p", hdfsDir],
                             stderr=stdErr,
                             stdout=stdOut)
    else:  # otherwise, delete the directory
        print("Deleting", hdfsDir, "from HDFS...")
        res = subprocess.run(
            ["hdfs", "dfs", "-rm", "-r", "-f", "-skipTrash", hdfsDir],
            stderr=stdErr,
            stdout=stdOut)
    stdOut.close()
    stdErr.close()
    if res.returncode != 0:
        raise subprocess.SubprocessError(
            "There were errors while working with the directory, check hdfs_stderr.txt")
    if not hdfsName:  # for the delete case we are done, return
        print("Done")
        return
    stdErr = open('hdfs_stderr.txt', 'w')
    stdOut = open('hdfs_stdout.txt', 'w')
    res = subprocess.run(
        ["hdfs", "dfs", "-put", "-f", fileName, hdfsDir + "/" + hdfsName],
        stderr=stdErr,
        stdout=stdOut)
    stdOut.close()
    stdErr.close()
    if res.returncode != 0:
        raise subprocess.SubprocessError(
            "There were errors while copying, check hdfs_stderr.txt")
    print("Done")
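
A hedged usage sketch (the file and directory names are invented): an empty hdfsName switches copyToHdfs into its delete branch.

# Copy a local CSV into an HDFS directory:
copyToHdfs("sales.csv", "/data/tables/sales", "sales.csv")

# Remove the whole HDFS directory (empty hdfsName triggers the delete branch):
copyToHdfs("", "/data/tables/sales", "")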
Example #7
def run_ert_subprocess(command: str, cwd: pathlib.Path, runpath: str,
                       timeout: int) -> None:
    """
    Helper function to run an ERT setup.

    Should revert here to use the much simpler subprocess.run when
    https://github.com/equinor/libres/issues/984 is closed. See
    https://github.com/equinor/flownet/pull/119, and
    https://github.com/equinor/flownet/pull/271,
    on possible changes to revert.

    Args:
        command: Command to run.
        cwd: The folder to run the command from.
        runpath: Runpath variable given to ERT.
        timeout: Inactivity timeout for killing FlowNet.

    Returns:
        Nothing

    """
    with subprocess.Popen(
            command,
            cwd=cwd,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
    ) as process:

        def _handler(*args):  # pylint: disable=unused-argument
            main_proc = psutil.Process(process.pid)
            for child_proc in main_proc.children(recursive=True):
                child_proc.kill()
            main_proc.kill()

            raise subprocess.SubprocessError(
                f"The ERT process has not returned any output for {timeout} seconds.\n"
                "FlowNet assumes that something fishy has happened and will kill\n"
                "ERT and all suprocesses. Check the logs for details.")

        signal.signal(signal.SIGALRM, _handler)

        for line in process.stdout:  # type: ignore
            signal.alarm(timeout)

            print(line, end="")
            if ("active realisations left, which is less than "
                    "the minimum specified - stopping assimilation." in line
                    or "All realizations failed!" in line):
                process.terminate()
                error_files = glob.glob(
                    str(cwd / runpath.replace("%d", "*") / "ERROR"))
                raise subprocess.SubprocessError(
                    pathlib.Path(error_files[0]).read_text())

    if process.returncode != 0:
        raise subprocess.SubprocessError(
            "The ERT workflow failed. Check the ERT log for more details.")
Example #8
def bedtools_intersect(
    a: PathType,
    b: PathType,
    output_path: PathType,
    write_a=True,
    nonoverlap=False,
    bedtools_exe: PathType = "bedtools",
) -> subprocess.CompletedProcess:
    """Report overlaps between two feature files.
    This is an incomplete wrapper around `bedtools intersect` version 2.27.1.
    The set of arguments here does not include all of the command-line arguments.
    Parameters
    ----------
    a : Path-like
        First feature file <bed/gff/vcf/bam>.
    b : Path-like
        Second feature file <bed/gff/vcf/bam>.
    output_path : Path-like
        Name of the output file. Can be compressed (`.bed.gz`).
    write_a : bool
        Write the original entry in `a` for each overlap.
    nonoverlap : bool
        Only report those entries in `a` that have no overlaps with `b`.
    bedtools_exe : Path-like
        The path to the `bedtools` executable. By default, uses `bedtools` in `$PATH`.
    Returns
    -------
    Instance of `subprocess.CompletedProcess`.
    """
    args = [str(bedtools_exe), "intersect"]
    if write_a:
        args.append("-wa")
    if nonoverlap:
        args.append("-v")
    args.extend(["-a", str(a), "-b", str(b)])

    output_bedfile = pathlib.Path(output_path)
    gzipped_output = output_bedfile.suffix == ".gz"
    openfile = gzip.open if gzipped_output else io.open
    try:
        # We cannot write stdout directly to a gzip file.
        # See https://stackoverflow.com/a/2853396/5666087
        process = subprocess.run(args,
                                 check=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        if not process.stdout:
            raise subprocess.SubprocessError(
                f"empty stdout, aborting. stderr is {process.stderr.decode()}")
        with openfile(output_path, mode="wb") as f:  # type: ignore
            f.write(process.stdout)
        return process
    except subprocess.CalledProcessError as e:
        raise subprocess.SubprocessError(e.stderr.decode()) from e
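
A usage sketch, assuming bedtools is available on $PATH; the BED file names are placeholders.

# Keep only the entries of a.bed that do not overlap b.bed; the .gz suffix
# makes the wrapper write gzip-compressed output.
completed = bedtools_intersect(
    a="a.bed",
    b="b.bed",
    output_path="a_not_in_b.bed.gz",
    nonoverlap=True,
)
print(completed.returncode)  # 0, since check=True would have raised on failure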
Example #9
    def run(self) -> int:
        """
        Executes the process in the current thread.
        :return int process result
        :raises ProcessException: Gets raised when the result of the process was not 0
        and "ignore_errors" was not called
        """

        self.log.info('Starting ' + self.__process_args[0])
        pipe_stdio = self.__collect_output or self._print_output or self.__stdin is not None

        args = {
            'args': self.__process_args,
            'env': self.__env,
            'cwd': self.__working_directory,
            'shell': self.__use_shell
        }

        if pipe_stdio:
            args['stdout'] = subprocess.PIPE
            args['stderr'] = subprocess.PIPE
            args['stdin'] = subprocess.PIPE

        self.__process = subprocess.Popen(**args)

        # Set the stdin again so it will be sent to the process
        self.stdin(self.__stdin)

        if pipe_stdio:
            # Only start stdio threads if the output should be collected or shown
            self.__start_stdio_threads()

        # Wait for the process to complete
        self.__process.wait()
        result = self.__process.poll()
        self.log.debug('Process completed with code ' + str(result))

        # Remove object so other threads know the process terminated
        self.__process = None

        if pipe_stdio:
            # Only join stdio threads if they were started in the first place
            self.__stderr_reader.join()
            self.__stdout_reader.join()

        if not self.__ignore_errors and result != 0:
            if self.__collect_output:
                raise subprocess.SubprocessError('Process returned ' +
                                                 str(result) + '\n' +
                                                 str(self.out) + "\n" +
                                                 str(self.err))

            raise subprocess.SubprocessError('Process returned ' + str(result))
        return result
Example #10
def generate_from_proto():

    generated_dir = SCRIPT_DIR.joinpath("generated")

    if generated_dir.exists():
        shutil.rmtree(generated_dir)

    generated_dir.mkdir(parents=False, exist_ok=False)

    protoc_ctrl = ROOT_PATH.joinpath("dev/codegen/protoc-ctrl.py")

    domain_cmd = [
        str(sys.executable),
        str(protoc_ctrl), "python_runtime", "--proto_path",
        "tracdap-api/tracdap-metadata/src/main/proto", "--proto_path",
        "tracdap-api/tracdap-config/src/main/proto", "--out",
        "tracdap-runtime/python/generated/tracdap/rt_gen/domain"
    ]

    proto_cmd = [
        str(sys.executable),
        str(protoc_ctrl), "python_proto", "--proto_path",
        "tracdap-api/tracdap-metadata/src/main/proto", "--proto_path",
        "tracdap-api/tracdap-config/src/main/proto", "--out",
        "tracdap-runtime/python/generated/tracdap/rt_gen/domain"
    ]

    domain_proc = subprocess.Popen(domain_cmd,
                                   stdout=subprocess.PIPE,
                                   cwd=ROOT_PATH,
                                   env=os.environ)
    domain_out, domain_err = domain_proc.communicate()
    domain_result = domain_proc.wait()

    print(domain_out.decode("utf-8"))

    if domain_result != 0:
        raise subprocess.SubprocessError(
            "Failed to generate domain classes from definitions")

    proto_proc = subprocess.Popen(proto_cmd,
                                  stdout=subprocess.PIPE,
                                  cwd=ROOT_PATH,
                                  env=os.environ)
    proto_out, proto_err = proto_proc.communicate()
    proto_result = proto_proc.wait()

    print(proto_out.decode("utf-8"))

    if proto_result != 0:
        raise subprocess.SubprocessError(
            "Failed to generate proto classes from definitions")
Example #11
def proc_call(cmd, cwd=None, install_recommends=True):
    """Execute cmd (list of arguments) as a subprocess. Returned is a tuple with
    stdout and stderr, decoded if not None. If the return value is not equal 0, a
    subprocess error is raised. Timeouts will happen after 20 seconds."""
    with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=1,
            universal_newlines=False,
            cwd=cwd,
    ) as proc:
        data = []
        try:
            data = [
                d.decode(sys.getdefaultencoding(), errors="surrogateescape")
                for d in proc.communicate(timeout=20) if d
            ]
            if proc.wait():
                raise subprocess.SubprocessError(
                    "Error while executing %s\n%s\n" %
                    (" ".join(cmd), "\n".join(data)))
        except subprocess.TimeoutExpired as e:
            proc.kill()
            note = "Subprocess expired with time out: " + str(cmd) + "\n"
            poll = proc.poll()
            if poll:
                note += str(poll) + "\n"
            if data:
                raise subprocess.SubprocessError("\n".join(data) + "\n" + note)
            else:
                raise subprocess.SubprocessError("execution timed out after " +
                                                 str(e.args[1]) + " s: " +
                                                 " ".join(e.args[0]))
        except KeyboardInterrupt as e:
            sys.stderr.write("\nInterrupted; ")
            import traceback

            traceback.print_exc(file=sys.stderr)
        except FileNotFoundError:
            # program missing, try to help
            text = "Command `%s` not found." % cmd[0]
            if install_recommends and shutil.which("dpkg"):
                text += " Install it using `sudo apt install " + install_recommends
            else:
                text += " Install a TeX distribution of your choice, e.g. MikTeX or TeXlive."
            raise subprocess.SubprocessError(text) from None
        if isinstance(data, list):
            return "\n".join(data)
        return data
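
A small usage sketch, assuming pdflatex is installed; install_recommends names the Debian package to suggest if the binary is missing.

try:
    log = proc_call(["pdflatex", "-halt-on-error", "main.tex"],
                    cwd="build", install_recommends="texlive-latex-base")
except subprocess.SubprocessError as err:
    print("LaTeX run failed:", err)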
Example #12
async def start_omega(timeout=60):
    port = find_free_port()

    if await is_local_omega_alive(port):
        return None

    cfg.omega.urls.quotes_server = f"http://localhost:{port}"
    account = os.environ["JQ_ACCOUNT"]
    password = os.environ["JQ_PASSWORD"]

    # hack: by default postgres is disabled, but we need it enabled for ut
    cfg_ = json.dumps(
        {"postgres": {
            "dsn": cfg.postgres.dsn,
            "enabled": "true"
        }})

    process = subprocess.Popen(
        [
            sys.executable,
            "-m",
            "omega.app",
            "start",
            "--impl=jqadaptor",
            f"--cfg={cfg_}",
            f"--account={account}",
            f"--password={password}",
            f"--port={port}",
        ],
        env=os.environ,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )

    for i in range(timeout, 0, -1):
        await asyncio.sleep(1)
        if process.poll() is not None:
            # already exit
            out, err = process.communicate()
            logger.warning("subprocess exited, %s: %s", process.pid,
                           out.decode("utf-8"))
            raise subprocess.SubprocessError(err.decode("utf-8"))
        if await is_local_omega_alive(port):
            logger.info("omega server is listen on %s",
                        cfg.omega.urls.quotes_server)
            return process

    raise subprocess.SubprocessError("Omega server malfunction.")
Example #13
def ssh_scp(source: str, destine: str, psw: str):
    """

    :param source:
    :param destine:
    :param psw:
    :return:
    """

    if os.path.isdir(source):
        cmd = shlex.split(
            "sshpass -p '{pws}' scp -r {source} {destine}".format(
                pws=psw, source=source, destine=destine))
    else:
        cmd = shlex.split("sshpass -p '{pws}' scp {source} {destine}".format(
            pws=psw, source=source, destine=destine))
    with sp.Popen(cmd,
                  stdin=sp.PIPE,
                  stdout=sp.PIPE,
                  stderr=sp.PIPE,
                  encoding="utf-8") as p:
        try:
            std_out, std_err = p.communicate(timeout=3600 * 4)
            print("done", str(std_out))
        except sp.TimeoutExpired as e:
            print("Timeout error", e, p.returncode)
            p.kill()
            std_out, std_err = p.communicate()
        if p.returncode != 0:  # Bad error.
            raise sp.CalledProcessError(p.returncode, std_err)
        elif len(std_err) != 0:
            # Some possible errors thrown by the running subprocess, but not critical.
            raise sp.SubprocessError(std_err)
        return std_out
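
A usage sketch with placeholder credentials; it assumes sshpass and scp are installed on the calling machine.

out = ssh_scp("./results", "user@192.0.2.10:/home/user/results", psw="secret")
print(out)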
Example #14
    def test_get_settings_with_error(
        self, mock_check_output: MagicMock, _mock_logger: MagicMock
    ):
        mock_check_output.side_effect = subprocess.SubprocessError('foo')

        settings = Openvas.get_settings()

        mock_check_output.assert_called_with(['openvas', '-s'])

        self.assertFalse(settings)  # settings dict is empty

        mock_check_output.reset_mock()

        mock_check_output.side_effect = OSError('foo')

        settings = Openvas.get_settings()

        mock_check_output.assert_called_with(['openvas', '-s'])

        self.assertFalse(settings)  # settings dict is empty

        mock_check_output.reset_mock()

        # https://gehrcke.de/2015/12/how-to-raise-unicodedecodeerror-in-python-3/
        mock_check_output.side_effect = UnicodeDecodeError(
            'funnycodec', b'\x00\x00', 1, 2, 'This is just a fake reason!'
        )

        settings = Openvas.get_settings()

        mock_check_output.assert_called_with(['openvas', '-s'])

        self.assertFalse(settings)  # settings dict is empty
Example #15
def _sync_DAPPER(self):
    """Sync DAPPER (as it currently exists, not a specific version)

    to compute-nodes, which don't have external IP addresses.
    """
    # Get list of files: whatever mentioned by .git
    repo = f"--git-dir={rc.dirs.DAPPER}/.git"
    files = sub_run(f"git {repo} ls-tree -r --name-only HEAD",
                    shell=True).split()

    def xcldd(f):
        return f.startswith("docs/") or f.endswith(".jpg") or f.endswith(
            ".png")

    files = [f for f in files if not xcldd(f)]

    with tempfile.NamedTemporaryFile("w", delete=False) as synclist:
        print("\n".join(files), file=synclist)

    print("Syncing DAPPER")
    try:
        self.rsync(rc.dirs.DAPPER, f"~/{self.xps_path.name}/DAPPER",
                   "--files-from=" + synclist.name)
    except subprocess.SubprocessError as error:
        # Suggest common source of error in the message.
        msg = error.args[0] + \
            "\nDid you mv/rm files (and not registering it with .git)?"
        raise subprocess.SubprocessError(msg) from error
Example #16
async def test_subprocess_exceptions(caplog: LogCaptureFixture,
                                     hass: HomeAssistant) -> None:
    """Test that notify subprocess exceptions are handled correctly."""

    with patch("homeassistant.components.command_line.notify.subprocess.Popen"
               ) as check_output:
        check_output.return_value.__enter__ = check_output
        check_output.return_value.communicate.side_effect = [
            subprocess.TimeoutExpired("cmd", 10),
            None,
            subprocess.SubprocessError(),
        ]

        await setup_test_service(hass, {"command": "exit 0"})
        assert await hass.services.async_call(DOMAIN,
                                              "test", {"message": "error"},
                                              blocking=True)
        assert check_output.call_count == 2
        assert "Timeout for command" in caplog.text

        assert await hass.services.async_call(DOMAIN,
                                              "test", {"message": "error"},
                                              blocking=True)
        assert check_output.call_count == 4
        assert "Error trying to exec command" in caplog.text
Example #17
    def test_stop_scan_with_error(self, mock_check_call: MagicMock,
                                  mock_logger: MagicMock):
        mock_check_call.side_effect = subprocess.SubprocessError('foo')

        success = Openvas.stop_scan('scan_1')

        mock_check_call.assert_called_with(
            ['openvas', '--scan-stop', 'scan_1'])

        self.assertFalse(success)

        self.assertEqual(mock_logger.warning.call_count, 1)

        mock_check_call.reset_mock()
        mock_logger.reset_mock()

        mock_check_call.side_effect = OSError('foo')

        success = Openvas.stop_scan('scan_1')

        mock_check_call.assert_called_with(
            ['openvas', '--scan-stop', 'scan_1'])

        self.assertFalse(success)

        self.assertEqual(mock_logger.warning.call_count, 1)
Example #18
 def Rsync(
     src: str,
     dst: str,
     host_port: int,
     excludes: typing.List[str],
     dry_run: bool,
     verbose: bool,
     delete: bool,
     progress: bool,
 ):
     """Private helper method to invoke rsync with appropriate arguments."""
     cmd = [
         "rsync",
         "-ah",
         str(src),
         str(dst),
         "-e",
         f"ssh -p {host_port}",
     ] + labtypes.flatten([["--exclude", p] for p in excludes])
     if dry_run:
         cmd.append("--dry-run")
     if verbose:
         cmd.append("--verbose")
     if delete:
         cmd.append("--delete")
     if progress:
         cmd.append("--progress")
     app.Log(1, " ".join(cmd))
     p = subprocess.Popen(cmd)
     p.communicate()
     if p.returncode:
         raise subprocess.SubprocessError(
             f"rsync failed with returncode {p.returncode}")
Example #19
    def wipe_sdcard(self):

        args = [
            str(Setting.installer_binary_path),
            "wipe",
            "--sdcard",
            str(Setting.usb_path),
        ]

        self.logger.info("Starting {args}\n".format(args=" ".join(args)))

        log_fd = open(str(self.wipe_log_path), "w")
        ps = subprocess.run(
            args=args,
            stdout=log_fd,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
            close_fds=True,
        )

        if ps.returncode == 0:
            self.logger.info("wipe ran successfuly.")
        else:
            self.logger.error("wipe failed: {}".format(ps.returncode))

        self.logger.info("collecting full terminated log")
        self.logs["wipe_log"] = self.read_log(self.wipe_log_path)

        if ps.returncode != 0:
            raise subprocess.SubprocessError("wipe rc: {}".format(
                ps.returncode))
Example #20
    def create(self, input_file: Path):
        """Function for making local blast database.

        This function creates database from tRNAs retrieved from tRNAscan-SE.

        Args:
            input_file (Path): Path to file containing DB sequences.
        Returns:
            str: Database output.
        Creates:
            (*.nhr, *.nin, *.nsq): Created database's files in LMDB format.
        Raises:
            SubprocessError: When makeblastdb returns error or when input file does not exist.
        """

        try:
            cmd = NcbimakeblastdbCommandline(input_file=str(input_file),
                                             dbtype="nucl",
                                             title=self.name,
                                             out=self.name)
            cmd()
            makeblastdb_output = subprocess.run(str(cmd),
                                                capture_output=True,
                                                shell=True)
            if makeblastdb_output.stderr:
                raise subprocess.SubprocessError(
                    f"Makeblastdb returned error: {makeblastdb_output.stderr.decode()}"
                )
        except Exception:
            raise
        finally:
            if input_file.exists():
                input_file.unlink()
        return makeblastdb_output.stdout.decode()
Example #21
def sub_run(*args, check=True, capture_output=True, text=True, **kwargs):
    """`subprocess.run`, with other defaults, and return stdout.

    Example:
    >>> gitfiles = sub_run(["git", "ls-tree", "-r", "--name-only", "HEAD"])
    >>> # Alternatively:
    >>> # gitfiles = sub_run("git ls-tree -r --name-only HEAD", shell=True)
    >>> # Only .py files:
    >>> gitfiles = [f for f in gitfiles.split("\n") if f.endswith(".py")]
    """

    try:
        x = subprocess.run(*args,
                           **kwargs,
                           check=check,
                           capture_output=capture_output,
                           text=text)

    except subprocess.CalledProcessError as error:
        # CalledProcessError doesn't print its .stderr,
        # so we raise it this way:
        raise subprocess.SubprocessError(
            f"Command {error.cmd} returned non-zero exit status, "
            f"with stderr:\n{error.stderr}") from error

    if capture_output:
        return x.stdout
Example #22
    def create(self) -> tuple:
        """Function for making local blast database.

        This function creates database from files found in source_dir.

        Returns:
            tuple(Database, str): Database object, output from makeblastdb.
        Creates:
            (*.nhr, *.nin, *.nsq): Created database's files in LMDB format.
        Raises:
            SubprocessError: When makeblastdb returns error or when input file does not exist.
        """

        self._aggregate(self.source_dir, Path("blast_input.fasta"))
        try:
            cmd = NcbimakeblastdbCommandline(input_file="blast_input.fasta",
                                             dbtype="nucl",
                                             title=self.name,
                                             out=self.name)
            makeblastdb_output = subprocess.run(str(cmd),
                                                capture_output=True,
                                                shell=True)
            if makeblastdb_output.stderr:
                raise subprocess.SubprocessError(
                    f"Makeblastdb returned error: {makeblastdb_output.stderr.decode()}"
                )
        except Exception:
            raise
        finally:
            if Path("blast_input.fasta").exists():
                Path("blast_input.fasta").unlink()
        return self, makeblastdb_output.stdout.decode()
Example #23
    def query(self, query_dir: Path, config: dict, blast_format: str,
              headers: tuple) -> pd.DataFrame:
        """This function queries content of the directory to created database

        Args:
            query_dir (Path): Directory containing query files.
            config (dict): Blast configuration dict.
            blast_format (str): Blast output format.
            headers ( tuple(*str) ): Headers matching blast output for final DataFrame.
        Raises:
            TypeError: When given obj is of wrong type.
            FileNotFoundError: When given path does not exist or when given path is not a directory.
            ValueError: When forbidden blast option was provided.
        Returns:
            (pd.DataFrame): Pandas DataFrame containing query results.
        """

        if not isinstance(query_dir, Path):
            raise TypeError("Given object is not Path object")
        if not query_dir.exists():
            raise FileNotFoundError("Given path does not exist")
        if not query_dir.is_dir():
            raise FileNotFoundError("Given path is not directory")

        if not isinstance(config, dict):
            raise TypeError("Config file is not a dict object")
        if any(kwarg in ('query', 'db', 'outfmt', 'max_target_seqs',
                         'num_alignments') for kwarg in config.keys()):
            used = filter(
                lambda k: k in config.keys(),
                ('query', 'db', 'outfmt', 'max_target_seqs', 'num_alignments'))
            raise ValueError(
                "Given kwargs are not valid in terms of blast usage",
                list(used))

        self._aggregate(query_dir, Path("blast_query.fasta"))
        try:
            cmd = NcbiblastnCommandline(query="blast_query.fasta",
                                        db=f"{self.name}",
                                        outfmt=blast_format,
                                        **config)
            blastn_output = subprocess.run(str(cmd),
                                           capture_output=True,
                                           shell=True)

            # Error only occurs if it's not this stupid warning.
            if (blastn_output.stderr and "Examining 5 or more matches"
                    not in blastn_output.stderr.decode()):
                raise subprocess.SubprocessError(
                    f"Blastn returned error: {blastn_output.stderr.decode()}")
        except Exception:
            raise
        finally:
            if Path("blast_query.fasta").exists():
                Path("blast_query.fasta").unlink()
        results_df: pd.DataFrame = pd.read_csv(io.StringIO(
            blastn_output.stdout.decode()),
                                               header=None,
                                               names=headers)
        return results_df
Example #24
def run_command(cmd: List[str], cwd=None, allow_verbose: bool = False) -> Tuple[Optional[str], Optional[str]]:
    """
    Run arbitrary command.

    :param cmd: the command to run (list of command and arguments)
    :param cwd: if specified, use this path as working directory (otherwise current working directory is used)
    :param allow_verbose: if true, redirect command output to stdout (WARNING, causes return values to be None)
    :return: command output and error output (if not (allow_verbose and cfg.verbosity))
    """

    if cwd is not None:
        cwd = os.path.abspath(cwd)

    if allow_verbose and cfg.verbosity >= 2 and not cfg.is_unit_test:
        process = subprocess.Popen(cmd, cwd=cwd)
        output, error = process.communicate() # will be None
    else:
        # run
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)

        # collect output
        output, error = process.communicate()

        # decode output
        output = output.decode('utf-8').rstrip()
        error = error.decode('utf-8').rstrip()

    # check for error
    if process.returncode != 0:
        cmd = get_command(cmd)
        msg = f"Non-zero exit status {process.returncode} for command:\n{cwd}: $ {cmd}\n\n{output}\n{error}"
        raise subprocess.SubprocessError(msg)

    return output, error
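
A hedged usage sketch; the repository path is a placeholder and cfg.verbosity is assumed to be configured elsewhere.

out, err = run_command(["git", "status", "--short"], cwd="/path/to/repo")
if out:
    print("Working tree is dirty:\n" + out)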
Example #25
    def test_start_scan_error(self, mock_popen: MagicMock,
                              mock_logger: MagicMock):
        mock_popen.side_effect = subprocess.SubprocessError('foo')

        proc = Openvas.start_scan('scan_1')

        mock_popen.assert_called_with(['openvas', '--scan-start', 'scan_1'],
                                      shell=False)

        self.assertIsNone(proc)

        self.assertEqual(mock_logger.warning.call_count, 1)

        mock_popen.reset_mock()
        mock_logger.reset_mock()

        mock_popen.side_effect = OSError('foo')

        proc = Openvas.start_scan('scan_1')

        mock_popen.assert_called_with(['openvas', '--scan-start', 'scan_1'],
                                      shell=False)

        self.assertIsNone(proc)

        self.assertEqual(mock_logger.warning.call_count, 1)
Example #26
def RunCommand(command: List[str], msg: str) -> str:
    """Runs a command and returns the standard output.

  Args:
    command (List[str]): The list of command chunks to use in subprocess.run.
        ex: ['git', 'grep', 'cat'] to find all instances of cat in a repo.
    msg (str): An error message in case the subprocess fails for some reason.

  Raises:
    subprocess.SubprocessError: Raises this with the command that failed in the
        event that the return code of the process is non-zero.

  Returns:
    str: the standard output of the subprocess.
  """
    command = [piece for piece in command if piece != ""]
    proc = subprocess.run(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          stdin=subprocess.DEVNULL)
    out = proc.stdout.decode("utf-8", errors="ignore")
    err = proc.stderr.decode("utf-8", errors="ignore")
    if proc.returncode != 0:
        sys.stderr.write("{}\nreturn code: {}\nstdout: {}\nstderr: {}".format(
            msg, proc.returncode, out, err))
        raise subprocess.SubprocessError(
            "Command failed to complete successfully. {}".format(command))
    return out
Example #27
async def test_switch_command_state_value_exceptions(
        caplog: Any, hass: HomeAssistant) -> None:
    """Test that switch state value exceptions are handled correctly."""

    with patch(
            "homeassistant.components.command_line.subprocess.check_output",
            side_effect=[
                subprocess.TimeoutExpired("cmd", 10),
                subprocess.SubprocessError(),
            ],
    ) as check_output:
        await setup_test_entity(
            hass,
            {
                "test": {
                    "command_on": "exit 0",
                    "command_off": "exit 0'",
                    "command_state": "echo 1",
                    "value_template": '{{ value=="1" }}',
                }
            },
        )
        async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL)
        await hass.async_block_till_done()
        assert check_output.call_count == 1
        assert "Timeout for command" in caplog.text

        async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL * 2)
        await hass.async_block_till_done()
        assert check_output.call_count == 2
        assert "Error trying to exec command" in caplog.text
Example #28
    def configure(self, **kwargs: Optional[Dict]) -> None:
        """Setup the FakePopen instance based on a real Popen arguments."""
        self.__universal_newlines = kwargs.get("universal_newlines", None)
        text = kwargs.get("text", None)
        encoding = kwargs.get("encoding", None)
        errors = kwargs.get("errors", None)

        if text and sys.version_info < (3, 7):
            raise TypeError(
                "__init__() got an unexpected keyword argument 'text'")

        self.text_mode = bool(text or self.__universal_newlines or encoding
                              or errors)

        # validation taken from the real subprocess
        if (text is not None and self.__universal_newlines is not None
                and bool(self.__universal_newlines) != bool(text)):
            raise subprocess.SubprocessError(
                "Cannot disambiguate when both text "
                "and universal_newlines are supplied but "
                "different. Pass one or the other.")

        stdout = kwargs.get("stdout")
        if stdout == subprocess.PIPE:
            self.stdout = self._prepare_buffer(self.__stdout)
        elif isinstance(stdout, (io.BufferedWriter, io.TextIOWrapper)):
            self._write_to_buffer(self.__stdout, stdout)
        stderr = kwargs.get("stderr")
        if stderr == subprocess.STDOUT and self.__stderr:
            self.stdout = self._prepare_buffer(self.__stderr, self.stdout)
        elif stderr == subprocess.PIPE:
            self.stderr = self._prepare_buffer(self.__stderr)
        elif isinstance(stderr, (io.BufferedWriter, io.TextIOWrapper)):
            self._write_to_buffer(self.__stderr, stderr)
Example #29
 def test__bdss_done(self):
     with mock.patch("force_wfmanager.wfmanager_setup_task.information"
                     ) as mock_info:
         exception = subprocess.SubprocessError()
         self.setup_task._bdss_done(exception)
         mock_info.assert_called_with(
             None, "Execution of BDSS stopped by the user.")
Example #30
 def Rsync(src: str, dst: str, host_port: int, excludes: typing.List[str],
           dry_run: bool, verbose: bool, delete: bool, progress: bool):
     """Private helper method to invoke rsync with appropriate arguments."""
     cmd = [
         'rsync',
         '-ah',
         str(src),
         str(dst),
         '-e',
         f'ssh -p {host_port}',
     ] + labtypes.flatten([['--exclude', p] for p in excludes])
     if dry_run:
         cmd.append('--dry-run')
     if verbose:
         cmd.append('--verbose')
     if delete:
         cmd.append('--delete')
     if progress:
         cmd.append('--progress')
     logging.info(' '.join(cmd))
     p = subprocess.Popen(cmd)
     p.communicate()
     if p.returncode:
         raise subprocess.SubprocessError(
             f'rsync failed with returncode {p.returncode}')