Example #1
 def configfile(self, fp):
     """ Update the global config with data from the given file. """
     global config
     self.configfiles.append(fp)
     c = snakemake.io.load_configfile(fp)
     update_config(config, c)
     update_config(config, self.overwrite_config)
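
For context, the two update_config calls above apply values in a fixed order: the file's contents are merged into the global config first, then the stored --config overrides are re-applied so they always win on conflicts. A minimal, self-contained sketch of that precedence, using made-up keys and values:

from snakemake.utils import update_config

# Hypothetical values, purely to illustrate the precedence order above.
config = {}
file_values = {"threads": 4, "genome": "hg38"}   # as if loaded from the config file
overwrite_config = {"threads": 8}                # as if passed via --config threads=8

update_config(config, file_values)       # file values land in the global config
update_config(config, overwrite_config)  # --config overrides win on conflicts
assert config == {"threads": 8, "genome": "hg38"}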
Example #2
 def configfile(self, jsonpath):
     """ Update the global config with the given dictionary. """
     global config
     self.configfiles.append(jsonpath)
     c = snakemake.io.load_configfile(jsonpath)
     update_config(config, c)
     update_config(config, self.overwrite_config)
Example #3
def test_update_config():
    a = {'foo':{'bar':{'foo': 'bar', 'bar':'foo'}}}
    b = {'foo':{'bar':{'foo': 'barfoo'}}}
    update_config(a, b)
    c = a
    update_config(b, a)
    d = b
    assert c == {'foo': {'bar': {'foo': 'barfoo', 'bar': 'foo'}}}
    assert d == {'foo': {'bar': {'bar': 'foo', 'foo': 'barfoo'}}}
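
The assertions above pin down update_config's merge semantics: nested dicts are merged key by key, and the second argument wins on conflicts. For reference, those semantics can be approximated with a short recursive merge; this is only a sketch of the behaviour the test checks, not the snakemake.utils implementation:

def recursive_merge(base, overrides):
    """Merge `overrides` into `base` in place: nested dicts are merged
    recursively, and values from `overrides` win on conflicts."""
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            recursive_merge(base[key], value)
        else:
            base[key] = value
    return base

assert recursive_merge(
    {'foo': {'bar': {'foo': 'bar', 'bar': 'foo'}}},
    {'foo': {'bar': {'foo': 'barfoo'}}},
) == {'foo': {'bar': {'foo': 'barfoo', 'bar': 'foo'}}}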
Example #4
def Init(argv):
    parser = argparse.ArgumentParser(
        "mars init",
        description=
        "Creates a config file for MARS, optionally populated with values")
    parser.add_argument(
        '-o',
        '--output',
        help="Output file to write config file (default: stdout)")
    parser.add_argument(
        '-c',
        '--configfile',
        type=argparse.FileType('r'),
        help=
        "Path to another config file whose values will be added to this one (where possible)"
    )
    parser.add_argument('--force',
                        action='store_true',
                        help="Overwite output file if it exists")
    parser.add_argument(
        "values",
        nargs=argparse.REMAINDER,
        help=("Config values to be added to the config file in format "
              "`key:value`. Overrides values from other config file if both "
              "present."))
    args = parser.parse_args(argv)
    if args.output and Path(args.output).exists() and not args.force:
        mars_error("Chosen output file exists. Use --force to overwrite.")
    elif args.output:
        output = open(args.output, 'w')
    else:
        output = sys.stdout

    old_config = yaml.safe_load(args.configfile) if args.configfile else {}
    # Loading a blank file will return None from yaml.safe_load()
    if old_config is None:
        old_config = {}
    cmdline_kv = {}
    for kv_pair in args.values:
        try:
            key, value = kv_pair.strip().split(":")
            cmdline_kv[key] = value
        except ValueError as e:
            mars_error("Could not parse key:value '{}': {}".format(kv_pair, e))
    update_config(old_config, cmdline_kv)
    config, unused_keys = create_config(**old_config)

    output.write(config)

    if unused_keys:
        logger.warn(
            "Warning: the following keys were specified but unused: {}".format(
                unused_keys))
    logger.info(
        "Config values not specified are commented out in the config file. "
        "Uncomment the relevant lines and add appropriate values as necessary."
    )
Example #5
def update_config(config):
    """
    Populates the config with default values and makes changes where necessary.
    """

    # get default values and update them with values specified in config file
    default_config = make_default_config()
    utils.update_config(default_config, config)

    return default_config
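
Note the direction of the merge here: the user config is merged into a fresh set of defaults, so user-supplied values override the defaults while unspecified keys keep their default values. A minimal illustration, assuming utils.update_config behaves like snakemake.utils.update_config and using made-up keys:

from snakemake.utils import update_config

# Hypothetical defaults and user config, shown only to illustrate precedence.
defaults = {"threads": 1, "mem": "4G"}
user_config = {"threads": 16}

update_config(defaults, user_config)  # the second argument wins on conflicts
assert defaults == {"threads": 16, "mem": "4G"}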
Example #6
def update_config(config):
    """
    Populates the config with default values and makes changes where necessary
    (e.g. renaming legacy keys).
    """

    # older versions used 'java_mem'; the new key is 'mem'
    if ('java_mem' in config) and (not ('mem' in config)):
        config['mem'] = config['java_mem']

    # get default values and update them with values specified in config file
    default_config = make_default_config()
    utils.update_config(default_config, config)

    return default_config
Example #7
 def post(self, name):
     # get data from post
     data = request.get_json()
     pipeline = PipelineModel.find_by_name(name)
     if not pipeline:
         return {'message': f"Pipeline {name} not found in Database"}
     # load config json into config dict
     path_to_snakefile = os.path.join(path_to_static, name, 'Snakefile')
     # load user configs if given
     config = pipeline.json()['config']
     if data:
         user_config = data['config']
         # use the function load_config from snakemake itself!!
         update_config(config, user_config)
     workdir = config['workdir']
     os.makedirs(workdir, exist_ok=True)
     path_to_config_file = os.path.join(workdir, 'config.yaml')
     path_to_dag = os.path.join(workdir, 'dag.json')
     with open(path_to_config_file, 'w+') as stream:
         yaml.dump(config, stream, default_flow_style=False)
     try:
         dagcmd = f"snakemake --snakefile {path_to_snakefile} --configfile {path_to_config_file} --d3dag > {path_to_dag}"
         # ! place dag in d3dag and return
         snakedag = Popen(dagcmd,
                          shell=True,
                          stdout=PIPE,
                          stderr=STDOUT,
                          close_fds=True)
     except CalledProcessError as er:
         print(f'Snakemake returned Error code {er.returncode}')
     snakedag.communicate()
     with open(path_to_dag, 'r') as file:
         d3dag = json.load(file)
     info = {'dag': d3dag, 'config': config}
     # run the snakemake jobs
     # try:
     #     subprocess.check_call(['snakemake', '--snakefile', path_to_snakefile, '--configfile', path_to_config_file])
     # except subprocess.CalledProcessError as er:
     #     print(f'Snakemake returned Error code {er.returncode}')
     return info
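
The DAG command above is assembled as a single shell string and run with shell=True. For reference, the same snakemake invocation can be made without a shell by passing the arguments as a list and writing stdout straight to the DAG file; a sketch under that assumption, reusing the paths defined in the method above:

import subprocess

# Same snakemake call without shell=True: arguments are passed as a list and
# the --d3dag JSON output is written directly to the file.
with open(path_to_dag, 'w') as dag_fh:
    subprocess.run(
        ['snakemake', '--snakefile', path_to_snakefile,
         '--configfile', path_to_config_file, '--d3dag'],
        stdout=dag_fh,
        check=True,  # raises CalledProcessError on a non-zero exit code
    )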
Example #8
def register_metadata(metadata_file, config):
    """Read an SRA project file and register metadata in sml_config. Will
    issue a warning if file does not exists.

    Args: 
      metadata - file name
      config - configuration to update
    """
    metadata_list = []
    import sys
    if metadata_file in sys.argv:
        return config
    try:
        with open(metadata_file, "r") as fh:
            reader = csv.DictReader(fh.readlines())
        metadata_list = [row for row in reader]
        run2sample = {row["Run"]:row["SampleName"] for row in metadata_list}
        config_default = {
            'bio.ngs.settings' : {
                'sampleinfo' : metadata_file
            },
            'bio.ngs.tools.sratools': {
                '_datadir': os.path.dirname(metadata_file),
                '_run2sample' : run2sample,
                '_metadata' : metadata_list
            },
        }
        update_config(config_default, config)
        config = config_default
        
    except Exception:
        raise Exception("""

        no metadata file '{metadata}' found

        please initiate analysis by running 'snakemake {metadata}'

        """.format(metadata=metadata_file))
    return config
Example #9
def check_config_default(c):
    for fn in [c['config_default']]:
        assert op.isfile(fn), "cannot read %s" % fn

    cfg_default = yaml.safe_load(open(c['config_default'], 'r'))
    update_config(cfg_default, c)
    c = cfg_default

    for fn in [c['config_job_default']]:
        assert op.isfile(fn), "cannot read %s" % fn

    cfg_job = read_job_config(c)
    update_config(cfg_job, c)
    c = cfg_job

    dirh0, dirc0 = c['dir_project'], c['dir_cache']
    pid, wid, oid = c['pid'], c['wid'], c['oid']
    c['dirh'] = op.join(dirh0, pid, wid)
    c['dirc'] = op.join(dirc0, pid, wid)
    c['dirr'] = op.join(dirh0, pid, wid, oid)
    dirh, dirc, dirr = c['dirh'], c['dirc'], c['dirr']
    dirr_l = op.join(dirc, oid)
    for subdir in [dirh, dirc, dirr, c['tmpdir']]:
        if not op.isdir(subdir):
            makedirs(subdir)
    make_symlink(dirr, dirr_l)

    dirh_l = op.join(dirc, 'primary')
    dirc_l = op.join(dirh, 'cache')
    make_symlink(dirc, dirc_l)
    make_symlink(dirh, dirh_l)

    xdic = read_genome_config(c)
    gdic = {g: dict() for g in xdic.keys()}
    c['x'] = xdic
    c['g'] = gdic

    return c
Example #10
ap = argparse.ArgumentParser()
ap.add_argument('config', help='Main config.yaml file')
ap.add_argument('hub_config', help='Track hub config YAML file')
ap.add_argument('--additional-configs',
                nargs='+',
                help='Additional config files with which to update the main '
                'config')
args = ap.parse_args()

# Access configured options. See comments in example hub_config.yaml for
# details
config = yaml.load(open(args.config), Loader=yaml.FullLoader)

if args.additional_configs:
    for cfg in args.additional_configs:
        update_config(config, yaml.load(open(cfg), Loader=yaml.FullLoader))

hub_config = yaml.load(open(args.hub_config), Loader=yaml.FullLoader)

hub, genomes_file, genome, trackdb = default_hub(
    hub_name=hub_config['hub']['name'],
    short_label=hub_config['hub']['short_label'],
    long_label=hub_config['hub']['long_label'],
    email=hub_config['hub']['email'],
    genome=hub_config['hub']['genome'])

c = ChIPSeqConfig(
    config, os.path.join(os.path.dirname(args.config),
                         'chipseq_patterns.yaml'))

# Set up subgroups based on unique values from columns specified in the config
Example #11
 def configfile(self, jsonpath):
     """ Update the global config with the given dictionary. """
     global config
     c = snakemake.io.load_configfile(jsonpath)
     update_config(config, c)
     update_config(config, self.overwrite_config)
Example #12
def test_update_config_with_string():
    with pytest.raises(AttributeError):
        update_config({}, "foo")
Example #13
def update_config(config_new):

    utils.update_config("config.yaml", config_new)
    return
Example #14
ap = argparse.ArgumentParser()
ap.add_argument('config', help='Main config.yaml file')
ap.add_argument('hub_config', help='Track hub config YAML file')
ap.add_argument('--additional-configs',
                nargs='+',
                help='Additional config files with which to update the main '
                'config')
args = ap.parse_args()

# Access configured options. See comments in example hub_config.yaml for
# details
config = yaml.load(open(args.config))

if args.additional_configs:
    for cfg in args.additional_configs:
        update_config(config, yaml.load(open(cfg)))

hub_config = yaml.load(open(args.hub_config))

hub, genomes_file, genome, trackdb = default_hub(
    hub_name=hub_config['hub']['name'],
    short_label=hub_config['hub']['short_label'],
    long_label=hub_config['hub']['long_label'],
    email=hub_config['hub']['email'],
    genome=hub_config['hub']['genome'])

c = ChIPSeqConfig(
    config, os.path.join(os.path.dirname(args.config),
                         'chipseq_patterns.yaml'))

# Set up subgroups based on unique values from columns specified in the config
Example #15
def Init(argv):
    parser = argparse.ArgumentParser(
        "venus init",
        description="Creates config and samplesheet files for VENUS")
    parser.add_argument(
        '-o',
        '--output',
        default="config.yml",
        help="Output file to write config file (default: config.yml)")
    parser.add_argument(
        '-c',
        '--configfile',
        type=argparse.FileType('r'),
        help=
        "Path to another config file whose values will be added to this one (where possible)"
    )
    parser.add_argument('--force',
                        action='store_true',
                        help="Overwite output file if it exists")
    parser.add_argument(
        "values",
        nargs=argparse.REMAINDER,
        default=["output_dir:venus_output", "samplesheet_fp:samplesheet.tsv"],
        help=("Config values to be added to the config file in format "
              "`key:value`. Overrides values from other config file if both "
              "present. (default: %(default)s)"))
    args = parser.parse_args(argv)
    if Path(args.output).exists() and not args.force:
        venus_error(
            "Output file '{}' exists. Use --force to overwrite.".format(
                args.output))
    else:
        output = open(args.output, 'w')

    old_config = yaml.safe_load(args.configfile) if args.configfile else {}
    # Loading a blank file will return None from yaml.safe_load()
    if old_config is None:
        old_config = {}
    cmdline_kv = {}
    if not args.values:
        args.values = [
            "output_dir:venus_output", "samplesheet_fp:samplesheet.tsv"
        ]
        logger.info("Default output_dir name chosen: 'venus_output'")
        logger.info("Default samplesheet_fp chosen: 'samplesheet.tsv'")
        logger.info("Edit these if they don't match your setup!")
    for kv_pair in args.values:
        try:
            key, value = kv_pair.strip().split(":")
            cmdline_kv[key] = value
        except ValueError as e:
            venus_error("Could not parse key:value '{}': {}".format(
                kv_pair, e))
    update_config(old_config, cmdline_kv)
    config, unused_keys = create_config(**old_config)

    output.write(config)

    if unused_keys:
        logger.warn(
            "Warning: the following keys were specified but unused: {}".format(
                unused_keys))
    logger.info(
        "Config values not specified are commented out in the config file. "
        "Uncomment the relevant lines and add appropriate values as necessary."
    )
    logger.info("** Config file written to {} **".format(args.output))
Example #16
default_config_sections = ["db", "quant", "filter", "analysis", "qc", "bfq", "samples"]
for section in default_config_sections:
    if section not in config:
        config[section] = {}

# default config
main_fn = srcdir("main.config")
with open(main_fn) as fh:
    CONF = yaml.load(fh, Loader=Loader) or {}

GCF_SECRET = os.environ.get("GCF_SECRET")
if GCF_SECRET:
    with open(GCF_SECRET) as fh:
        SECRETS = yaml.load(fh, Loader=Loader) or {}
    update_config(CONF, SECRETS)

# library preparation kit specific configuration
libprep_fn = srcdir("libprep.config")
with open(libprep_fn) as fh:
    LIBPREP_CONF = yaml.load(fh, Loader=Loader) or {}
kit = config.get("libprepkit")
if kit is not None:
    if len(config["read_geometry"]) > 1:
        kit += " PE"
    else:
        kit += " SE"
if kit in LIBPREP_CONF:
    # overwrite default config
    update_config(CONF, LIBPREP_CONF[kit])  
else: