def __init__(
    self,
    snakefile=None,
    jobscript=None,
    overwrite_shellcmd=None,
    overwrite_config=dict(),
    overwrite_workdir=None,
    overwrite_configfiles=None,
    overwrite_clusterconfig=dict(),
    overwrite_threads=dict(),
    config_args=None,
    debug=False,
    verbose=False,
    use_conda=False,
    conda_prefix=None,
    use_singularity=False,
    use_env_modules=False,
    singularity_prefix=None,
    singularity_args="",
    shadow_prefix=None,
    mode=Mode.default,
    wrapper_prefix=None,
    printshellcmds=False,
    restart_times=None,
    attempt=1,
    default_remote_provider=None,
    default_remote_prefix="",
    run_local=True,
    default_resources=None,
    cache=None,
    nodes=1,
    cores=1,
    resources=None,
    conda_cleanup_pkgs=False,
):
    """
    Create the controller.
    """
    # Global resources (including the core and node counts) available to the scheduler.
    self.global_resources = dict() if resources is None else resources
    self.global_resources["_cores"] = cores
    self.global_resources["_nodes"] = nodes

    self._rules = OrderedDict()
    self.first_rule = None
    self._workdir = None
    self.overwrite_workdir = overwrite_workdir
    self.workdir_init = os.path.abspath(os.curdir)
    self._ruleorder = Ruleorder()
    self._localrules = set()
    self.linemaps = dict()
    self.rule_count = 0
    self.basedir = os.path.dirname(snakefile)
    self.snakefile = os.path.abspath(snakefile)
    self.included = []
    self.included_stack = []
    self.jobscript = jobscript
    self.persistence = None
    self.globals = globals()
    self._subworkflows = dict()
    self.overwrite_shellcmd = overwrite_shellcmd
    self.overwrite_config = overwrite_config
    self.overwrite_configfiles = overwrite_configfiles
    self.overwrite_clusterconfig = overwrite_clusterconfig
    self.overwrite_threads = overwrite_threads
    self.config_args = config_args
    self.immediate_submit = None
    self._onsuccess = lambda log: None
    self._onerror = lambda log: None
    self._onstart = lambda log: None
    self._wildcard_constraints = dict()
    self.debug = debug
    self.verbose = verbose
    self._rulecount = 0
    self.use_conda = use_conda
    self.conda_prefix = conda_prefix
    self.use_singularity = use_singularity
    self.use_env_modules = use_env_modules
    self.singularity_prefix = singularity_prefix
    self.singularity_args = singularity_args
    self.shadow_prefix = shadow_prefix
    self.global_container_img = None
    self.mode = mode
    self.wrapper_prefix = wrapper_prefix
    self.printshellcmds = printshellcmds
    self.restart_times = restart_times
    self.attempt = attempt
    self.default_remote_provider = default_remote_provider
    self.default_remote_prefix = default_remote_prefix
    self.configfiles = []
    self.run_local = run_local
    self.report_text = None
    self.conda_cleanup_pkgs = conda_cleanup_pkgs
    # environment variables to pass to jobs
    # These are defined via the "envvars:" syntax in the Snakefile itself
    self.envvars = set()

    # Output file caching across workflow runs: only the rules listed in `cache`
    # participate, using a remote cache if a default remote provider is set.
    self.enable_cache = False
    if cache is not None:
        self.enable_cache = True
        self.cache_rules = set(cache)
        if self.default_remote_provider is not None:
            self.output_file_cache = RemoteOutputFileCache(
                self.default_remote_provider
            )
        else:
            self.output_file_cache = LocalOutputFileCache()
    else:
        self.output_file_cache = None
        self.cache_rules = set()

    if default_resources is not None:
        self.default_resources = default_resources
    else:
        # only _cores and _nodes
        self.default_resources = DefaultResources()

    self.iocache = snakemake.io.IOCache()

    # Initialize the module-level globals that are exposed to the Snakefile.
    global config
    config = copy.deepcopy(self.overwrite_config)
    global cluster_config
    cluster_config = copy.deepcopy(self.overwrite_clusterconfig)
    global rules
    rules = Rules()
    global checkpoints
    checkpoints = Checkpoints()
def __init__(
    self,
    snakefile=None,
    jobscript=None,
    overwrite_shellcmd=None,
    overwrite_config=dict(),
    overwrite_workdir=None,
    overwrite_configfile=None,
    overwrite_clusterconfig=dict(),
    config_args=None,
    debug=False,
    use_conda=False,
    conda_prefix=None,
    use_singularity=False,
    singularity_prefix=None,
    singularity_args="",
    use_docker=False,
    docker_args="",
    shadow_prefix=None,
    mode=Mode.default,
    wrapper_prefix=None,
    printshellcmds=False,
    restart_times=None,
    attempt=1,
    default_remote_provider=None,
    default_remote_prefix="",
    run_local=True,
    default_resources=dict(),
):
    """
    Create the controller.
    """
    self._rules = OrderedDict()
    self.first_rule = None
    self._workdir = None
    self.overwrite_workdir = overwrite_workdir
    self.workdir_init = os.path.abspath(os.curdir)
    self._ruleorder = Ruleorder()
    self._localrules = set()
    self.linemaps = dict()
    self.rule_count = 0
    self.basedir = os.path.dirname(snakefile)
    self.snakefile = os.path.abspath(snakefile)
    self.included = []
    self.included_stack = []
    self.jobscript = jobscript
    self.persistence = None
    self.global_resources = None
    self.globals = globals()
    self._subworkflows = dict()
    self.overwrite_shellcmd = overwrite_shellcmd
    self.overwrite_config = overwrite_config
    self.overwrite_configfile = overwrite_configfile
    self.overwrite_clusterconfig = overwrite_clusterconfig
    self.config_args = config_args
    self.immediate_submit = None
    self._onsuccess = lambda log: None
    self._onerror = lambda log: None
    self._onstart = lambda log: None
    self._wildcard_constraints = dict()
    self.debug = debug
    self._rulecount = 0
    self.use_conda = use_conda
    self.conda_prefix = conda_prefix
    self.use_singularity = use_singularity
    self.singularity_prefix = singularity_prefix
    self.singularity_args = singularity_args
    self.use_docker = use_docker
    self.docker_args = docker_args
    self.shadow_prefix = shadow_prefix
    self.global_singularity_img = None
    self.mode = mode
    self.wrapper_prefix = wrapper_prefix
    self.printshellcmds = printshellcmds
    self.restart_times = restart_times
    self.attempt = attempt
    self.default_remote_provider = default_remote_provider
    self.default_remote_prefix = default_remote_prefix
    self.configfiles = []
    self.run_local = run_local
    self.report_text = None

    # Per-job default resources; the core and node counts are always present.
    self.default_resources = dict(_cores=1, _nodes=1)
    self.default_resources.update(default_resources)

    self.iocache = snakemake.io.IOCache()

    # Initialize the module-level globals that are exposed to the Snakefile.
    global config
    config = copy.deepcopy(self.overwrite_config)
    global cluster_config
    cluster_config = copy.deepcopy(self.overwrite_clusterconfig)
    global rules
    rules = Rules()
    global checkpoints
    checkpoints = Checkpoints()
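# A minimal usage sketch, assuming the __init__ above belongs to a `Workflow`
# class and that a file named "Snakefile" exists in the working directory;
# both the class name and the file name are assumptions for illustration only.
#
#     workflow = Workflow(
#         snakefile="Snakefile",
#         use_conda=True,
#         printshellcmds=True,
#     )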