def code(self):
    """Return a list of HTML snippets with the highlighted code of this rule.

    Depending on how the rule is defined, the source is taken from its
    shell command, script, wrapper, or notebook (one cell per entry).
    For plain ``run`` directives no source can be recovered and an empty
    list is returned.

    Returns:
        list of str -- HTML fragments produced by pygments, or escaped
        ``<pre>`` blocks if no lexer is known for the detected language.

    Raises:
        WorkflowError -- if the pygments package is not installed.
    """
    try:
        from pygments.lexers import get_lexer_by_name
        from pygments.formatters import HtmlFormatter
        from pygments import highlight
        import pygments.util
    except ImportError:
        raise WorkflowError(
            "Python package pygments must be installed to create reports.")
    sources, language = None, None
    if self._rule.shellcmd is not None:
        sources = [self._rule.shellcmd]
        language = "bash"
    elif self._rule.script is not None and not contains_wildcard(
            self._rule.script):
        logger.info("Loading script code for rule {}".format(self.name))
        _, source, language = script.get_source(self._rule.script,
                                                self._rule.basedir)
        sources = [source.decode()]
    elif self._rule.wrapper is not None and not contains_wildcard(
            self._rule.wrapper):
        logger.info("Loading wrapper code for rule {}".format(self.name))
        _, source, language = script.get_source(
            wrapper.get_script(self._rule.wrapper,
                               prefix=self._rule.workflow.wrapper_prefix))
        sources = [source.decode()]
    elif self._rule.notebook is not None and not contains_wildcard(
            self._rule.notebook):
        _, source, language = script.get_source(self._rule.notebook,
                                                self._rule.basedir)
        # get_source reports e.g. "jupyter_python"; keep only the kernel
        # language for the lexer lookup.
        language = language.split("_")[1]
        sources = notebook.get_cell_sources(source)
    else:
        # A run directive. There is no easy way yet to obtain
        # the actual uncompiled source code.
        sources = []
        language = "python"
    try:
        lexer = get_lexer_by_name(language)
        highlighted = [
            highlight(
                source,
                lexer,
                HtmlFormatter(linenos=True, cssclass="source", wrapcode=True),
            )
            for source in sources
        ]
        return highlighted
    except pygments.util.ClassNotFound:
        # No lexer for this language: fall back to a plain <pre> block.
        # Escape the source so characters like '<', '>' and '&' cannot
        # corrupt the surrounding report HTML (the previous code embedded
        # the raw source unescaped).
        import html
        return [
            '<pre class="source"><code>{}</code></pre>'.format(
                html.escape(source))
            for source in sources
        ]
def contains_wildcard(self):
    """Tell whether this object's file path contains any wildcard."""
    path = self.file
    return contains_wildcard(path)
def contains_wildcard(self):
    """Tell whether this object's name contains any wildcard."""
    name = self.name
    return contains_wildcard(name)
def containerize(workflow):
    """Print a Dockerfile to stdout that bundles all conda environments
    used by the given workflow into a single container image.

    The image is based on ``condaforge/mambaforge``. Each distinct conda
    environment definition (deduplicated by content hash) is copied or
    fetched into the image and created with mamba. The image is labeled
    with a combined SHA-256 hash over all environment contents so that a
    change in any environment yields a different label.

    Arguments:
        workflow -- the workflow object whose rules' conda environments
            shall be containerized.

    Raises:
        WorkflowError -- if any conda env definition contains a wildcard
            (such environments cannot be resolved statically).
    """
    if any(
            contains_wildcard(rule.conda_env)
            for rule in workflow.rules
            if rule.conda_env is not None):
        raise WorkflowError(
            "Containerization of conda based workflows is not allowed if any conda env definition contains a wildcard."
        )

    relfile = lambda env: os.path.relpath(env.file, os.getcwd())

    # Deduplicate env definitions and order them deterministically by
    # their relative path so the generated Dockerfile is reproducible.
    envs = sorted(
        set(
            conda.Env(rule.conda_env, workflow, env_dir=CONDA_ENV_PATH)
            for rule in workflow.rules
            if rule.conda_env is not None),
        key=relfile,
    )

    # Build a combined hash over all environment contents; it is stored
    # in an image label so changed envs invalidate the container.
    envhash = hashlib.sha256()
    for env in envs:
        logger.info("Hashing conda environment {}.".format(relfile(env)))
        envhash.update(env.content)

    print("FROM condaforge/mambaforge:latest")
    print('LABEL io.github.snakemake.containerized="true"')
    print('LABEL io.github.snakemake.conda_env_hash="{}"'.format(
        envhash.hexdigest()))

    generated = set()
    get_env_cmds = []
    generate_env_cmds = []
    for env in envs:
        if env.content_hash in generated:
            # another conda env with the same content was generated before
            continue
        prefix = Path(CONDA_ENV_PATH) / env.content_hash
        env_source_path = relfile(env)
        env_target_path = prefix / "environment.yaml"
        # Record the original definition as comments for traceability.
        get_env_cmds.append("\n# Conda environment:")
        get_env_cmds.append("# source: {}".format(env_source_path))
        get_env_cmds.append("# prefix: {}".format(prefix))
        get_env_cmds.append("\n".join(
            map("# {}".format, env.content.decode().strip().split("\n"))))
        get_env_cmds.append("RUN mkdir -p {}".format(prefix))
        # Remote env definitions are fetched by Docker itself via ADD.
        # ADD handles plain http:// URLs the same way as https://, so
        # accept both (the previous check only recognized https://).
        if env.file.startswith(("http://", "https://")):
            get_env_cmds.append("ADD {} {}".format(env.file, env_target_path))
        else:
            get_env_cmds.append("COPY {} {}".format(env_source_path,
                                                    env_target_path))
        generate_env_cmds.append(
            "mamba env create --prefix {} --file {} &&".format(
                prefix, env_target_path))
        generated.add(env.content_hash)

    print("\n# Step 1: Retrieve conda environments")
    for cmd in get_env_cmds:
        print(cmd)

    # All env creations run in a single RUN layer, finished by a cache
    # cleanup to keep the image small.
    print("\n# Step 2: Generate conda environments")
    print("\nRUN", " \\\n ".join(generate_env_cmds),
          "\\\n mamba clean --all -y")