Beispiel #1
0
def implementation(logger, args):
    """Match Galaxy tool Docker containers against locally cached Singularity images.

    Walks ``args.sg_local_path`` to index the available Singularity image
    files, queries the Galaxy instance at ``args.url`` for all tools, resolves
    each tool's mulled (biocontainers) container name, and records which tools
    have a matching local Singularity image.  Results are dumped to
    ``<host>_<args.matched>`` (matched tools) and ``<host>_<args.notmatched>``
    (tools with no resolvable container).

    :param logger: application logger
    :param args: parsed CLI namespace; reads ``sg_local_path``, ``url``,
        ``key``, ``matched`` and ``notmatched``
    """
    # Index every file found under the local Singularity cache by basename,
    # so container basenames can be looked up in O(1).
    list_of_files = {}
    if path_exists(args.sg_local_path, logger=logger, force=True):
        for dirpath, dirnames, filenames in os.walk(args.sg_local_path):
            for filename in filenames:
                list_of_files[filename] = os.sep.join([dirpath, filename])
        logger.debug(list_of_files)

    gi = GalaxyInstance(args.url, key=args.key)
    tools = gi.tools.get_tools()

    counter_singularity = 0
    counter_docker = 0
    match = {}
    unmatch = []

    for t in tools:
        t_id = t['id']
        t_xml_file = gi.tools.show_tool(t['id'])['config_file']

        container_name = None
        try:
            tool_xml = load(t_xml_file)
            requirements, containers = parse_requirements_from_xml(tool_xml)
            conda_targets = requirements_to_conda_targets(requirements)
            mulled_targets = [
                build_target(c.package, c.version) for c in conda_targets
            ]
            container_name = mulled_container_name("biocontainers",
                                                   mulled_targets)
        except Exception:
            # Best-effort: a tool whose requirements cannot be resolved is
            # simply reported as unmatched below.
            logger.exception('Caught an error at {} with tid: {}'.format(
                args.url, t_id))

        singularity = 'not_found'
        if container_name:
            container_name = container_name.lower()
            counter_docker += 1
            if os.path.basename(container_name) in list_of_files:
                singularity = os.path.join(args.sg_local_path,
                                           os.path.basename(container_name))
                counter_singularity += 1

            match[t_id] = {
                'docker': "docker://{}".format(container_name),
                'singularity': singularity
            }
        else:
            # BUG FIX: previously every tool id was appended unconditionally,
            # so matched tools also ended up in the "notmatched" report.
            unmatch.append(t_id)
        print(t_id, container_name, singularity)

    dump(match, "{}_{}".format(args.url.split('/')[2], args.matched))
    dump(unmatch, "{}_{}".format(args.url.split('/')[2], args.notmatched))

    print("number of tools {}".format(len(tools)))
    print("number of docker images matched {}".format(counter_docker))
    print("number of singularity images in CVMFS {}".format(
        len(list_of_files)))
    print(
        "number of singularity images matched {}".format(counter_singularity))
Beispiel #2
0
    def __init__(self,
                 args=None,
                 path_from_cli=None,
                 path_from_package='config/config.yaml',
                 config_filename='config.yaml'):
        """Locate and load the application configuration.

        Creates the config directory and copies the packaged default config
        file on first run, then reads the configuration from the
        highest-priority existing path (CLI-supplied path wins over the one
        in the config dir).

        :param args: parsed CLI namespace; ``loglevel`` and ``logfile`` are read
        :param path_from_cli: optional config file path given on the command line
        :param path_from_package: package-relative path of the default config
        :param config_filename: filename of the config inside ``config_dir``
        """
        def copy_config_file_from_package(appname, src, dst):
            # Copy the default config shipped inside the package to *dst*.
            _from_package = resource_filename(appname, src)
            copyfile(_from_package, dst)

        self.loglevel = args.loglevel
        self.logfile = args.logfile
        logger = a_logger(self.__class__.__name__,
                          level=self.loglevel,
                          filename=self.logfile)

        # NOTE(review): config_dir comes from the enclosing module scope.
        cfg_dir = os.path.join(config_dir)
        config_file_path = os.path.join(cfg_dir, config_filename)

        # Create configuration file from default if needed
        if not path_exists(cfg_dir, logger, force=False):
            logger.info('Creating config dir {}'.format(cfg_dir))
            ensure_dir(cfg_dir)
        if not path_exists(config_file_path, logger, force=False):
            logger.info('Copying default config file from {} package '
                        'resource'.format(__appname__))
            copy_config_file_from_package(__appname__, path_from_package,
                                          config_file_path)

        # Collect candidate config files with a priority weight; lower weight
        # sorts first, so a CLI-supplied path (weight 0) takes precedence.
        config_file_paths = []
        if path_from_cli and path_exists(path_from_cli, logger, force=False):
            config_file_paths.append(WeightedPath(path_from_cli, 0))
        if path_exists(config_file_path, logger, force=False):
            config_file_paths.append(WeightedPath(config_file_path, 1))

        logger.debug("config file paths: {}".format(config_file_paths))

        config_file_path = sorted(config_file_paths)[0].path
        logger.info('Reading configuration from {}'.format(config_file_path))

        c = load_config(config_file_path)
        # dict.get returns None for missing keys — same semantics as the
        # previous conditional expressions, but idiomatic.
        self.pipes_conf = c.get('pipelines')
        self.default_conf = c.get('default_vars')
Beispiel #3
0
    def get_profile(profile_label, profile_path, logger_):
        """Return the profile loaded from ``<profile_path>/<profile_label>.yaml``,
        or ``None`` when no such file exists.

        :param profile_label: profile name (file stem)
        :param profile_path: directory containing profile YAML files
        :param logger_: logger used for reporting lookup results
        """
        file_path = os.path.join(profile_path, '{}.yaml'.format(profile_label))

        if path_exists(file_path, logger_, force=False):
            msg = "Profile found at {}".format(file_path)
            print(msg)
            # BUG FIX: log through the logger passed as a parameter, not an
            # outer-scope `logger` (which may not be the intended one).
            logger_.info(msg)
            profile = load(file_path)
            return profile
        logger_.info("Profile not found at {}".format(file_path))
        return None
Beispiel #4
0
    def clone(self, label):
        """Clone the pipeline registered under *label* into the cache directory.

        After a successful clone the repo URL and last commit id are recorded
        in a ``.git_repo_last_commit`` file, and minimal default conda
        environment files are created when the repository does not ship its
        own.  When the target directory is not empty, cloning is skipped with
        a warning.

        :param label: key into ``self.conf`` identifying the pipeline
        """
        def ensure_requirements_file(path, dependencies):
            # Write a minimal conda environment file unless the repo ships one.
            if not path_exists(path):
                data = {
                    'channels': ['bioconda', 'conda-forge', 'defaults'],
                    'dependencies': dependencies
                }
                dump(data, path)

        pipeline = Pipeline(self.conf[label],
                            loglevel=self.loglevel,
                            logfile=self.logfile)
        repo_dir = os.path.join(self.cache_dir, label)
        ensure_dir(repo_dir)

        # Guard clause: refuse to clone into a non-empty directory.
        if not path_is_empty(repo_dir):
            self.logger.warning("Can't clone git repo {} "
                                "into {}".format(pipeline.url, repo_dir))
            return

        print("Cloning {}".format(pipeline.url))
        Repo.clone_from(pipeline.url, repo_dir)
        repo = Repo(repo_dir)
        master = repo.heads.master
        # Record provenance (URL + commit id) next to the checkout.
        with open(os.path.join(repo_dir, ".git_repo_last_commit"),
                  'w') as fh:
            fh.write(pipeline.url)
            fh.write("\ncommit id: {}".format(master.commit))

        # Default environments, created only when missing (DRY'd from two
        # previously duplicated blocks).
        ensure_requirements_file(
            os.path.join(repo_dir, self.core_environment_file),
            ['python==3.6.1', 'pip'])
        ensure_requirements_file(
            os.path.join(repo_dir, self.environment_file),
            ['snakemake', 'drmaa==0.7.8'])

        print("commit id: {}".format(master.commit))
        print("Done.\n")
        self.logger.info('Cloned git repo at {} into {} '
                         'directory'.format(pipeline.url, repo_dir))
Beispiel #5
0
    def write_profile(default_config, pl_, profile_label, profile_path,
                      logger_):
        """Create ``<profile_path>/<profile_label>.yaml`` from *default_config*
        merged with the pipeline's playbook variable template.

        An existing profile is left untouched unless ``args.force`` is set
        (``args`` is read from the enclosing scope — TODO confirm intended).

        :param default_config: dict of default variables
        :param pl_: pipeline object providing ``playbook_vars_template``
        :param profile_label: profile name, also used as the project name
        :param profile_path: directory where the profile file is written
        :param logger_: logger used for reporting
        """
        file_path = os.path.join(profile_path, '{}.yaml'.format(profile_label))
        if path_exists(file_path, logger_, force=False) and not args.force:
            msg = "{} profile already exists".format(file_path)
            print(msg)
            # BUG FIX: report through the logger that was passed in, not an
            # outer-scope `logger`.
            logger_.error(msg)
        else:
            # Template values override the defaults on key collision —
            # identical semantics to copy()+update(), via PEP 448 unpacking.
            to_dump = {
                **default_config,
                **pl_.playbook_vars_template(project_name=profile_label)
            }
            dump(to_dump, file_path)
            logger_.info("Created {} profile".format(file_path))
            print("Edit variables value into the {} file".format(file_path))
        return