Ejemplo n.º 1
0
def conf_logs(request):
    """
    Configure logging for the testing session.

    :param request: pytest request fixture providing CLI option access
    """
    chosen_level = request.config.getoption("--logging-level")
    init_logger(name=_LOGNAME, level=chosen_level, devmode=True)
    root_log = logging.getLogger(_LOGNAME)
    root_log.info(
        "Configured looper logger at level %s; attaching oldtests' logger %s",
        str(chosen_level), __name__)
    # Rebind the module-level logger as a child of the package logger.
    global _LOGGER
    _LOGGER = logging.getLogger("looper.{}".format(__name__))
Ejemplo n.º 2
0
def interactive(prj_lines=PROJECT_CONFIG_LINES,
                iface_lines=PIPELINE_INTERFACE_CONFIG_LINES,
                merge_table_lines=MERGE_TABLE_LINES,
                annotation_lines=SAMPLE_ANNOTATION_LINES,
                project_kwargs=None,
                logger_kwargs=None):
    """
    Create Project and PipelineInterface instances from default or given data.

    This is intended to provide easy access to instances of fundamental looper
    object for interactive test-authorship-motivated work in an iPython
    interpreter or Notebook. Test authorship is simplified if we provide
    easy access to viable instances of these objects.

    :param Iterable[str] prj_lines: project config lines
    :param Iterable[str] iface_lines: pipeline interface config lines
    :param Iterable[str] merge_table_lines: lines for a merge table file
    :param Iterable[str] annotation_lines: lines for a sample annotations file
    :param dict project_kwargs: keyword arguments for Project constructor
    :param dict logger_kwargs: keyword arguments for logging configuration;
        these override the default DEBUG-level "looper" logger settings
    :return (Project, PipelineInterface): one Project and one PipelineInterface
    """

    # Establish logging for interactive session.
    # Defaults first, then caller-supplied overrides win.
    looper_logger_kwargs = {"level": "DEBUG", "name": "looper"}
    looper_logger_kwargs.update(logger_kwargs or {})
    init_logger(**looper_logger_kwargs)

    # TODO: don't work with tempfiles once ctors tolerate Iterable.
    dirpath = tempfile.mkdtemp()
    path_conf_file = write_temp(prj_lines,
                                dirpath=dirpath,
                                fname=P_CONFIG_FILENAME)
    path_iface_file = write_temp(iface_lines,
                                 dirpath=dirpath,
                                 fname="pipeline_interface.yaml")
    path_merge_table_file = write_temp(merge_table_lines,
                                       dirpath=dirpath,
                                       fname=MERGE_TABLE_FILENAME)
    path_sample_annotation_file = write_temp(annotation_lines,
                                             dirpath=dirpath,
                                             fname=ANNOTATIONS_FILENAME)

    prj = Project(path_conf_file, **(project_kwargs or {}))
    iface = PipelineInterface(path_iface_file)
    # Clean up the temp files once the objects are built.
    # NOTE(review): dirpath itself is never removed, leaving an empty temp
    # directory behind — confirm whether that is acceptable.
    for path in [
            path_conf_file, path_iface_file, path_merge_table_file,
            path_sample_annotation_file
    ]:
        os.unlink(path)
    return prj, iface
Ejemplo n.º 3
0
def conf_logs(request):
    """
    Configure logging for the testing session.

    :param request: pytest request fixture providing CLI option access
    """
    chosen = request.config.getoption("--logging-level")
    pkg = "peppy"
    init_logger(name=pkg, level=chosen, devmode=True)
    logging.getLogger(pkg).info(
        "Configured pep logger at level %s; attaching tests' logger %s",
        str(chosen), __name__)
    # Rebind the module-level logger as a child of the package logger.
    global _LOGGER
    _LOGGER = logging.getLogger("peppy.{}".format(__name__))
Ejemplo n.º 4
0
def est_log(**kwargs):
    """
    Establish logging, e.g. for an interactive session.

    :param dict kwargs: keyword arguments for logger setup; any "name" key
        is discarded because fixed logger names are used here.
    :return logging.Logger: looper logger
    """
    opts = dict(kwargs)
    if "name" in opts:
        # Fixed names are enforced; announce that the given one is dropped.
        print("Ignoring {} and setting fixed values for logging names".format(
            opts.pop("name")))
    init_logger(name="peppy", **opts)
    return init_logger(name="looper", **opts)
Ejemplo n.º 5
0
def interactive(prj_lines=PROJECT_CONFIG_LINES,
                iface_lines=PIPELINE_INTERFACE_CONFIG_LINES,
                sample_subannotation_lines=SAMPLE_SUBANNOTATION_LINES,
                annotation_lines=SAMPLE_ANNOTATION_LINES,
                project_kwargs=None,
                logger_kwargs=None):
    """
    Create Project instance from default or given data.

    This is intended to provide easy access to instances of fundamental pep
    object for interactive test-authorship-motivated work in an iPython
    interpreter or Notebook. Test authorship is simplified if we provide
    easy access to viable instances of these objects.

    :param Iterable[str] prj_lines: project config lines
    :param Iterable[str] iface_lines: pipeline interface config lines
    :param Iterable[str] sample_subannotation_lines: lines for a merge table file
    :param Iterable[str] annotation_lines: lines for a sample annotations file
    :param dict project_kwargs: keyword arguments for Project constructor
    :param dict logger_kwargs: keyword arguments for logging configuration
    :return Project: configured Project
    """

    # Establish logging for interactive session; caller overrides defaults.
    log_opts = {"level": "DEBUG", "name": "peppy"}
    log_opts.update(logger_kwargs or {})
    init_logger(**log_opts)

    # TODO: don't work with tempfiles once ctors tolerate Iterable.
    dirpath = tempfile.mkdtemp()
    # (lines, filename) pairs for each temp file to materialize.
    specs = [
        (prj_lines, P_CONFIG_FILENAME),
        (iface_lines, "pipeline_interface.yaml"),
        (sample_subannotation_lines, SUBSAMPLES_FILENAME),
        (annotation_lines, ANNOTATIONS_FILENAME),
    ]
    paths = [_write_temp(lines, dirpath=dirpath, fname=fname)
             for lines, fname in specs]

    prj = Project(paths[0], **(project_kwargs or {}))
    # Remove the temp files once the Project is built.
    for written in paths:
        os.unlink(written)
    return prj
Ejemplo n.º 6
0
def test_logfile(tmpdir, filename):
    """ Validate file handler setting for created logger. """
    logpath = tmpdir.join(filename).strpath
    logger = init_logger(logfile=logpath)
    # Exactly one handler is expected, and it must be a file handler
    # pointing at the requested path.
    assert len(logger.handlers) == 1
    handler = _check_hdlr_kind(logger, logging.FileHandler)
    assert handler.stream.name == logpath
Ejemplo n.º 7
0
def test_logfile_and_stream(filename, stream, tmpdir):
    """ Logging can be both stream and file. """
    logpath = tmpdir.join(filename).strpath
    logger = init_logger(logfile=logpath, stream=stream)
    # One handler per destination: file and stream.
    assert len(logger.handlers) == 2
    file_hdlr = _check_hdlr_kind(logger, logging.FileHandler)
    stream_hdlr = _check_hdlr_kind(
        logger, logging.StreamHandler, omit=logging.FileHandler)
    assert file_hdlr.stream.name == logpath
    assert stream_hdlr.stream == stream
Ejemplo n.º 8
0
def main():
    """
    Primary workflow: parse CLI args, configure logging, and dispatch
    the requested subcommand (convert or list).
    """
    parser = build_argparser()
    args, remaining_args = parser.parse_known_args()

    # A subcommand is mandatory.
    if args.command is None:
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Determine the logging level: debug flag wins, then explicit
    # verbosity, then the package default.
    if args.dbg:
        chosen_level = args.logging_level or logging.DEBUG
    elif args.verbosity is not None:
        chosen_level = _LEVEL_BY_VERBOSITY[args.verbosity]
    else:
        chosen_level = LOGGING_LEVEL

    log_opts = dict(level=chosen_level, devmode=args.dbg)
    init_logger(name="peppy", **log_opts)
    global _LOGGER
    _LOGGER = init_logger(name=PKG_NAME, **log_opts)

    if args.command == "convert":
        _LOGGER.debug("Creating a Project object from: {}".format(args.pep))
        convert_project(Project(args.pep), args.format)
        _LOGGER.info("Conversion successful")
        sys.exit(0)

    if args.command == "list":
        list_formats()
        sys.exit(0)

    sys.exit(0)
Ejemplo n.º 9
0
def test_all_defaults(attr, check):
    """ Check the values on the logger that result from all default arguments. """
    logger = init_logger()
    if callable(check):
        # The check is a validator run over each element of the attribute;
        # it yields failure messages, which we collect and report together.
        problems = []
        for item in getattr(logger, attr):
            problems.extend(check(item))
        if problems:
            pytest.fail("Failures:\n{}".format("\n".join(problems)))
    else:
        # The check is a 2- or 3-tuple: (expected, message-format[, transform]).
        try:
            exp, err_fmt = check
            transform = None
        except ValueError:
            exp, err_fmt, transform = check
        obs = getattr(logger, attr)
        if transform:
            obs = transform(obs)
        assert exp == obs, err_fmt.format(exp, obs)
Ejemplo n.º 10
0
def main():
    """Primary workflow

    Parse CLI args, configure logging, then dispatch the requested divvy
    subcommand: init, list, write, or submit.
    """

    parser = logmuse.add_logging_options(build_argparser())
    # args, remaining_args = parser.parse_known_args()
    args = parser.parse_args()

    # NOTE(review): the logger is initialized under the name "yacman" even
    # though this is the divvy CLI — looks copy-pasted; confirm intent.
    logger_kwargs = {"level": args.verbosity, "devmode": args.logdev}
    logmuse.init_logger("yacman", **logger_kwargs)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    if args.command == "init":
        # Bootstrap a fresh divvy config file and exit.
        divcfg = args.config
        _LOGGER.debug("Initializing divvy configuration")
        is_writable(os.path.dirname(divcfg), check_exist=False)
        divvy_init(divcfg, DEFAULT_CONFIG_FILEPATH)
        sys.exit(0)

    _LOGGER.debug("Divvy config: {}".format(args.config))
    divcfg = select_divvy_config(args.config)
    _LOGGER.info("Using divvy config: {}".format(divcfg))
    dcc = ComputingConfiguration(filepath=divcfg)

    if args.command == "list":
        # Output header via logger and content via print so the user can
        # redirect the list from stdout if desired without the header as clutter
        _LOGGER.info("Available compute packages:\n")
        print("{}".format("\n".join(dcc.list_compute_packages())))
        # NOTE(review): exits with status 1 after a successful listing —
        # confirm whether a nonzero code is intended here.
        sys.exit(1)

    # Any non-divvy arguments will be passed along as key-value pairs
    # that can be used to populate the template.
    # keys = [str.replace(x, "--", "") for x in remaining_args[::2]]
    # cli_vars = dict(zip(keys, remaining_args[1::2]))
    if args.compute:
        # Each --compute entry is "key=value"; split into a dict.
        cli_vars = {y[0]: y[1] for y in [x.split("=") for x in args.compute]}
    else:
        cli_vars = {}

    if args.command == "write" or args.command == "submit":
        try:
            dcc.activate_package(args.package)
        except AttributeError:
            parser.print_help(sys.stderr)
            sys.exit(1)

        if args.settings:
            # Settings file values are layered after CLI-provided variables.
            _LOGGER.info("Loading settings file: %s", args.settings)
            with open(args.settings, "r") as f:
                vars_groups = [cli_vars, yaml.load(f, SafeLoader)]
        else:
            vars_groups = [cli_vars]

        _LOGGER.debug(vars_groups)
        if args.command == "write":
            dcc.write_script(args.outfile, vars_groups)
        elif args.command == "submit":
            dcc.submit(args.outfile, vars_groups)
Ejemplo n.º 11
0
# Project configuration, particularly for logging.

import logmuse
from ._version import __version__
from .henge import *

# Explicit public API: the Henge class plus selected helpers from .henge.
__classes__ = ["Henge"]
__all__ = __classes__ + ["connect_mongo", "split_schema", "NotFoundException"]

# Attach a package-level logger. propagate=True presumably lets records
# flow up to ancestor loggers — confirm against logmuse documentation.
logmuse.init_logger("henge", propagate=True)
Ejemplo n.º 12
0
# Project configuration, particularly for logging.

import logmuse
from ._version import __version__

# No classes or extra names are exported from this package root.
__classes__ = []
__all__ = __classes__ + []

# Attach a package-level logger under the package name.
logmuse.init_logger("bulker")
Ejemplo n.º 13
0
# Project configuration, particularly for logging.

import logmuse
from ._version import __version__

# No classes or extra names are exported from this package root.
__classes__ = []
__all__ = __classes__ + []

# Attach a package-level logger. NOTE(review): "packagename" looks like a
# template placeholder — confirm it matches the actual package name.
logmuse.init_logger("packagename")
Ejemplo n.º 14
0
import logmuse

# Attach a package-level logger under the package name.
logmuse.init_logger("seqcolapi")
Ejemplo n.º 15
0
def main():
    """
    Primary workflow.

    Parse CLI args, configure logging, load the bulker config, and dispatch
    the requested subcommand: init, list, activate, run, load, or inspect.
    Exits via sys.exit on completion or error.
    """

    parser = logmuse.add_logging_options(build_argparser())
    args, remaining_args = parser.parse_known_args()
    # NOTE(review): logger initialized under the name "yacman" in the bulker
    # CLI — looks copy-pasted; confirm intent.
    logger_kwargs = {"level": args.verbosity, "devmode": args.logdev}
    logmuse.init_logger(name="yacman", **logger_kwargs)
    global _LOGGER
    _LOGGER = logmuse.logger_via_cli(args)

    _LOGGER.debug("Command given: {}".format(args.command))

    if not args.command:
        parser.print_help()
        _LOGGER.error("No command given")
        sys.exit(1)

    if args.command == "init":
        # Bootstrap a fresh bulker config file and exit.
        bulkercfg = args.config
        _LOGGER.debug("Initializing bulker configuration")
        _is_writable(os.path.dirname(bulkercfg), check_exist=False)
        bulker_init(bulkercfg, DEFAULT_CONFIG_FILEPATH, args.engine)
        sys.exit(0)

    bulkercfg = select_bulker_config(args.config)
    bulker_config = yacman.YacAttMap(filepath=bulkercfg, writable=False)

    if args.command == "list":
        # Output header via logger and content via print so the user can
        # redirect the list from stdout if desired without the header as clutter

        if args.simple:
            fmt = "{namespace}/{crate}:{tag}"
        else:
            _LOGGER.info("Available crates:")
            fmt = "{namespace}/{crate}:{tag} -- {path}"

        if bulker_config.bulker.crates:
            # crates is a nested mapping: namespace -> crate -> tag -> path.
            for namespace, crates in bulker_config.bulker.crates.items():
                for crate, tags in crates.items():
                    for tag, path in tags.items():
                        print(
                            fmt.format(namespace=namespace,
                                       crate=crate,
                                       tag=tag,
                                       path=path))
        else:
            _LOGGER.info(
                "No crates available. Use 'bulker load' to load a crate.")
        sys.exit(1)

    # For all remaining commands we need a crate identifier

    _LOGGER.info("Bulker config: {}".format(bulkercfg))
    if args.command == "activate":
        try:
            cratelist = parse_registry_paths(
                args.crate_registry_paths,
                bulker_config.bulker.default_namespace)
            _LOGGER.debug(cratelist)
            _LOGGER.info("Activating bulker crate: {}{}".format(
                args.crate_registry_paths, " (Strict)" if args.strict else ""))
            # NOTE(review): args.no_prompt feeding the 'prompt' parameter looks
            # inverted — confirm bulker_activate's expected semantics.
            bulker_activate(bulker_config,
                            cratelist,
                            echo=args.echo,
                            strict=args.strict,
                            prompt=args.no_prompt)
        except KeyError as e:
            parser.print_help(sys.stderr)
            _LOGGER.error("{} is not an available crate".format(e))
            sys.exit(1)
        except MissingCrateError as e:
            _LOGGER.error("Missing crate: {}".format(e))
            sys.exit(1)
        except AttributeError as e:
            _LOGGER.error(
                "Your bulker config file is outdated, you need to re-initialize it: {}"
                .format(e))
            sys.exit(1)

    if args.command == "run":
        try:
            cratelist = parse_registry_paths(args.crate_registry_paths)
            _LOGGER.info("Activating crate: {}\n".format(
                args.crate_registry_paths))
            bulker_run(bulker_config, cratelist, args.cmd, strict=args.strict)
        except KeyError as e:
            parser.print_help(sys.stderr)
            _LOGGER.error("{} is not an available crate".format(e))
            sys.exit(1)
        except MissingCrateError as e:
            _LOGGER.error("Missing crate: {}".format(e))
            sys.exit(1)

    if args.command == "load":
        bulker_config.make_writable()
        manifest, cratevars = load_remote_registry_path(
            bulker_config, args.crate_registry_paths, args.manifest)
        exe_template_jinja = None
        build_template_jinja = None
        shell_template_jinja = None

        # Template paths in the config are resolved relative to the config
        # file's own directory.
        exe_template = mkabs(bulker_config.bulker.executable_template,
                             os.path.dirname(bulker_config._file_path))
        build_template = mkabs(bulker_config.bulker.build_template,
                               os.path.dirname(bulker_config._file_path))
        try:
            shell_template = mkabs(bulker_config.bulker.shell_template,
                                   os.path.dirname(bulker_config._file_path))
        except AttributeError:
            _LOGGER.error(
                "You need to re-initialize your bulker config or add a 'shell_template' attribute."
            )
            sys.exit(1)

        if not os.path.exists(exe_template):
            _LOGGER.error(
                "Bulker config points to a missing executable template: {}".
                format(exe_template))
            sys.exit(1)

        with open(exe_template, 'r') as f:
            # with open(DOCKER_TEMPLATE, 'r') as f:
            contents = f.read()
            exe_template_jinja = jinja2.Template(contents)

        if not os.path.exists(shell_template):
            _LOGGER.error(
                "Bulker config points to a missing shell template: {}".format(
                    shell_template))
            sys.exit(1)

        with open(shell_template, 'r') as f:
            # with open(DOCKER_TEMPLATE, 'r') as f:
            contents = f.read()
            shell_template_jinja = jinja2.Template(contents)

        if args.build:
            if not os.path.exists(build_template):
                _LOGGER.error(
                    "Bulker config points to a missing build template: {}".
                    format(build_template))
                sys.exit(1)

            _LOGGER.info(
                "Building images with template: {}".format(build_template))
            with open(build_template, 'r') as f:
                contents = f.read()
                build_template_jinja = jinja2.Template(contents)

        bulker_load(manifest,
                    cratevars,
                    bulker_config,
                    exe_jinja2_template=exe_template_jinja,
                    shell_jinja2_template=shell_template_jinja,
                    crate_path=args.path,
                    build=build_template_jinja,
                    force=args.force)

    if args.command == "inspect":
        if args.crate_registry_paths == "":
            _LOGGER.error(
                "No active create. Inspect requires a provided crate, or a currently active create."
            )
            sys.exit(1)
        manifest, cratevars = load_remote_registry_path(
            bulker_config, args.crate_registry_paths, None)
        manifest_name = cratevars['crate']

        print("Bulker manifest: {}".format(args.crate_registry_paths))
        crate_path = os.path.join(bulker_config.bulker.default_crate_folder,
                                  cratevars['namespace'], manifest_name,
                                  cratevars['tag'])
        if not os.path.isabs(crate_path):
            # BUGFIX: was `bcfg._file_path`, but `bcfg` is undefined here and
            # raised NameError for relative crate paths; use bulker_config.
            crate_path = os.path.join(
                os.path.dirname(bulker_config._file_path), crate_path)
        print("Crate path: {}".format(crate_path))
        import glob
        filenames = glob.glob(os.path.join(crate_path, "*"))
        available_commands = [
            x for x in [os.path.basename(x) for x in filenames] if x[0] != "_"
        ]
        print("Available commands: {}".format(available_commands))
Ejemplo n.º 16
0
python -c "import logmuse
print(logmuse.LEVEL_BY_VERBOSITY)"





parser = logmuse.add_logmuse_args(parser)
args = parser.parse_args()

lmargs = logmuse.retrieve_logmuse_args(args)


global _LOGGER 
_LOGGER = logmuse.init_logger(lmargs)
logmuse.init_logger(name="pararead", lmargs)



Non-CLI packages like pypiper and pararead should not need to know about logmuse;
however, when a CLI tool (such as a pipeline or a count_reads utility) imports one of them with a logger and itself uses logmuse, logging should "just work".




file = "../microtest/data/bs_aln_k1k3.bam"
Ejemplo n.º 17
0
import logmuse

from ._version import __version__

# Attach a package-level logger under the package name.
logmuse.init_logger("refgenie")
Ejemplo n.º 18
0
# Project configuration, particularly for logging.

import logmuse

from ._version import __version__
from .bbconf import *
from .const import *

# Explicit public API: the BedBaseConf class plus one helper from .bbconf.
__classes__ = ["BedBaseConf"]
__all__ = __classes__ + ["get_bedbase_cfg"]

# Attach a package-level logger under the package name.
logmuse.init_logger("bbconf")
Ejemplo n.º 19
0
# Project configuration, particularly for logging.

import logmuse
from ._version import __version__
from .pipestat import *
from .helpers import *

# Explicit public API: only the PipestatManager class.
__classes__ = ["PipestatManager"]
__all__ = __classes__

# Attach a package-level logger under the package name.
logmuse.init_logger("pipestat")
Ejemplo n.º 20
0
# Project configuration, particularly for logging.

import logmuse

from ._version import __version__
from .helpers import *
from .pipestat import *

# Explicit public API: only the PipestatManager class.
__classes__ = ["PipestatManager"]
__all__ = __classes__

# PKG_NAME is not imported explicitly here — presumably provided by one of
# the star imports above (likely a constants module); verify.
logmuse.init_logger(PKG_NAME)
Ejemplo n.º 21
0
def test_propagate(kwargs, exp):
    """ Determination of propagation flag considers root status and propagation. """
    logger = init_logger(**kwargs)
    # Identity check: propagate must be exactly the expected boolean.
    assert logger.propagate is exp
Ejemplo n.º 22
0
def test_stream(stream, exp):
    """ Validate stream handler setting for created logger. """
    logger = init_logger(stream=stream)
    # Exactly one handler, of stream kind, attached to the expected stream.
    assert len(logger.handlers) == 1
    hdlr = _check_hdlr_kind(logger, logging.StreamHandler)
    assert hdlr.stream == exp
Ejemplo n.º 23
0
"""Project configuration, particularly for logging.

Project-scope constants may reside here, but more importantly, some setup here
will provide a logging infrastructure for all of the project's modules.
Individual modules and classes may provide separate configuration on a more
local level, but this will at least provide a foundation.

"""

import logmuse
from ._version import __version__
from .compute import ComputingConfiguration, select_divvy_config
from .const import *
from .utils import write_submit_script

__classes__ = ["ComputingConfiguration"]
__functions__ = ["select_divvy_config"]
__all__ = __classes__ + __functions__ + [write_submit_script.__name__]

logmuse.init_logger("divvy")
Ejemplo n.º 24
0
def test_single_attr(att, val):
    """ Test successful setting of a simple, particular logger attribute. """
    logger = init_logger(**{att: val})
    # The attribute passed to the constructor must round-trip unchanged.
    assert getattr(logger, att) == val
Ejemplo n.º 25
0
# Project configuration, particularly for logging.

import logmuse
from .bedshift import Bedshift
from ._version import __version__

# Explicit public API: only the Bedshift class.
__classes__ = ["Bedshift"]
__all__ = __classes__ + []

# Attach a package-level logger under the package name.
logmuse.init_logger("bedshift")
Ejemplo n.º 26
0
def test_make_non_root_name_root():
    """ Non-root name for root logger is prohibited. """
    # Requesting the reserved name "root" while declining root status
    # must be rejected with ValueError.
    with pytest.raises(ValueError):
        init_logger("root", make_root=False)
Ejemplo n.º 27
0
def test_make_root(make_root, exp):
    """ Root status for logger has a couple of implications. """
    logger = init_logger(make_root=make_root)
    # Name follows root status, and propagation is always disabled.
    assert logger.name == exp
    assert logger.propagate is False
Ejemplo n.º 28
0
def main():
    """ Primary workflow

    Parse CLI args, resolve the project config (falling back to a dotfile),
    configure logging, build the Project, and dispatch the requested
    looper subcommand.
    """
    global _LOGGER
    parsers = build_parser()
    parser = parsers[0]
    aux_parser = parsers[1]
    aux_parser.suppress_defaults()
    args, remaining_args = parser.parse_known_args()
    if args.command is None:
        parser.print_help(sys.stderr)
        sys.exit(1)
    if args.config_file is None:
        # No config given on the CLI: fall back to the dotfile, or bail out.
        m = "No project config defined"
        try:
            setattr(args, "config_file", read_cfg_from_dotfile())
        except OSError:
            print(m + " and dotfile does not exist: {}".format(dotfile_path()))
            parser.print_help(sys.stderr)
            sys.exit(1)
        else:
            print(m + ", using: {}. Read from dotfile ({}).".format(
                read_cfg_from_dotfile(), dotfile_path()))
    if args.command == "init":
        # init_dotfile returns truthiness; invert it into a shell exit code.
        sys.exit(
            int(not init_dotfile(dotfile_path(), args.config_file, args.force))
        )
    args = enrich_args_via_cfg(args, aux_parser)

    # Set the logging level.
    if args.dbg:
        # Debug mode takes precedence and will listen for all messages.
        level = args.logging_level or logging.DEBUG
    elif args.verbosity is not None:
        # Verbosity-framed specification trumps logging_level.
        level = _LEVEL_BY_VERBOSITY[args.verbosity]
    else:
        # Normally, we're not in debug mode, and there's not verbosity.
        level = LOGGING_LEVEL

    # Establish the project-root logger and attach one for this module.
    logger_kwargs = {
        "level": level,
        "logfile": args.logfile,
        "devmode": args.dbg
    }
    init_logger(name="peppy", **logger_kwargs)
    init_logger(name="divvy", **logger_kwargs)
    init_logger(name="eido", **logger_kwargs)
    _LOGGER = init_logger(name=_PKGNAME, **logger_kwargs)

    # lc = LooperConfig(select_looper_config(filename=args.looper_config))
    # _LOGGER.debug("Determined genome config: {}".format(lc))

    _LOGGER.info("Looper version: {}\nCommand: {}".format(
        __version__, args.command))

    if len(remaining_args) > 0:
        _LOGGER.warning("Unrecognized arguments: {}".format(" ".join(
            [str(x) for x in remaining_args])))

    divcfg = select_divvy_config(filepath=args.divvy) \
        if hasattr(args, "divvy") else None

    # Initialize project
    _LOGGER.debug("Building Project")
    try:
        # Forward only the CLI attributes the Project constructor knows about.
        p = Project(config_file=args.config_file,
                    amendments=args.amend,
                    divcfg_path=divcfg,
                    runp=args.command == "runp",
                    **{
                        attr: getattr(args, attr)
                        for attr in CLI_PROJ_ATTRS if attr in args
                    })
    except yaml.parser.ParserError as e:
        _LOGGER.error("Project config parse failed -- {}".format(e))
        sys.exit(1)

    # Fall back to the default compute package when none was selected.
    selected_compute_pkg = p.selected_compute_package \
                           or DEFAULT_COMPUTE_RESOURCES_NAME
    if p.dcc is not None and not p.dcc.activate_package(selected_compute_pkg):
        _LOGGER.info("Failed to activate '{}' computing package. "
                     "Using the default one".format(selected_compute_pkg))

    # Apply sample selection filters for the duration of the command.
    with ProjectContext(prj=p,
                        selector_attribute=args.sel_attr,
                        selector_include=args.sel_incl,
                        selector_exclude=args.sel_excl) as prj:

        if args.command in ["run", "rerun"]:
            run = Runner(prj)
            try:
                compute_kwargs = _proc_resources_spec(args)
                run(args, rerun=(args.command == "rerun"), **compute_kwargs)
            except IOError:
                _LOGGER.error("{} pipeline_interfaces: '{}'".format(
                    prj.__class__.__name__, prj.pipeline_interface_sources))
                raise

        if args.command == "runp":
            compute_kwargs = _proc_resources_spec(args)
            collate = Collator(prj)
            collate(args, **compute_kwargs)

        if args.command == "destroy":
            return Destroyer(prj)(args)

        if args.command == "table":
            Table(prj)()

        if args.command == "report":
            Report(prj)(args)

        if args.command == "check":
            Checker(prj)(flags=args.flags)

        if args.command == "clean":
            return Cleaner(prj)(args)

        if args.command == "inspect":
            inspect_project(p, args.snames, args.attr_limit)