def implementation(logger, args):
    """Match Galaxy tools' Docker containers against local Singularity images.

    Walks ``args.sg_local_path`` to index the locally mirrored Singularity
    image files, then, for every tool on the Galaxy instance at ``args.url``,
    resolves the mulled (BioContainers) container name from the tool's
    requirements and records whether a same-named Singularity image exists
    locally.

    Side effects:
        Dumps the matched mapping to ``<host>_<args.matched>`` and the list of
        unmatched tool ids to ``<host>_<args.notmatched>`` (via the project's
        ``dump`` helper), and prints summary counters to stdout.

    :param logger: logger used for debug/exception reporting
    :param args: parsed CLI namespace with ``sg_local_path``, ``url``, ``key``,
        ``matched`` and ``notmatched`` attributes
    """
    # Index of locally available Singularity images: filename -> full path.
    # Lookup below is by basename, so only the keys are actually used.
    list_of_files = {}
    if path_exists(args.sg_local_path, logger=logger, force=True):
        for dirpath, dirnames, filenames in os.walk(args.sg_local_path):
            for filename in filenames:
                list_of_files[filename] = os.sep.join([dirpath, filename])
    logger.debug(list_of_files)

    gi = GalaxyInstance(args.url, key=args.key)
    tools = gi.tools.get_tools()

    counter_singularity = 0
    counter_docker = 0
    match = {}
    unmatch = []
    for t in tools:
        t_id = t['id']
        t_xml_file = gi.tools.show_tool(t['id'])['config_file']
        container_name = None
        try:
            tool_xml = load(t_xml_file)
            requirements, containers = parse_requirements_from_xml(tool_xml)
            conda_targets = requirements_to_conda_targets(requirements)
            mulled_targets = [
                build_target(c.package, c.version) for c in conda_targets
            ]
            container_name = mulled_container_name("biocontainers",
                                                   mulled_targets)
        except Exception:
            # Best-effort: a tool whose requirements cannot be resolved is
            # logged and counted as unmatched below.  (Redundant `pass` and
            # the unused `as ex` binding removed.)
            logger.exception('Caught an error at {} with tid: {}'.format(
                args.url, t_id))
        singularity = 'not_found'
        if container_name:
            container_name = container_name.lower()
            counter_docker += 1
            # Compute the basename once; it is both the lookup key and the
            # local image filename.
            image_basename = os.path.basename(container_name)
            if image_basename in list_of_files:
                singularity = os.path.join(args.sg_local_path, image_basename)
                counter_singularity += 1
            match[t_id] = {
                'docker': "docker://{}".format(container_name),
                'singularity': singularity
            }
        else:
            # Only tools with no resolvable container go to the unmatched
            # list, keeping `match` and `unmatch` disjoint.
            unmatch.append(t_id)
        print(t_id, container_name, singularity)

    # Hoisted: the host part of the URL is needed for both dump files.
    host = args.url.split('/')[2]
    dump(match, "{}_{}".format(host, args.matched))
    dump(unmatch, "{}_{}".format(host, args.notmatched))
    print("number of tools {}".format(len(tools)))
    print("number of docker images matched {}".format(counter_docker))
    print("number of singularity images in CVMFS {}".format(
        len(list_of_files)))
    print(
        "number of singularity images matched {}".format(counter_singularity))
def parse_file(self, xml_filepath):
    """Parse the given XML file for visualizations data.

    :param xml_filepath: path of the visualization XML config file
    :returns: visualization config dictionary
    """
    tree = load(xml_filepath)
    root = tree.getroot()
    return self.parse_visualization(root)
def __init_schedulers(self):
    """Initialize workflow schedulers from the plugin config file, falling
    back to the default scheduler when no usable config is available.
    """
    config_file = self.app.config.workflow_schedulers_config_file
    use_default_scheduler = False
    # Fall back to the default scheduler when no config file is set, or when
    # the configured path is only an (absent) implicit default.
    if not config_file or (not os.path.exists(config_file) and not self.app.config.is_set('workflow_schedulers_config_file')):
        log.info("No workflow schedulers plugin config file defined, using default scheduler.")
        use_default_scheduler = True
    # An explicitly configured file that does not exist also falls back,
    # but with a distinct log message naming the missing path.
    elif not os.path.exists(config_file):
        log.info(f"Cannot find workflow schedulers plugin config file '{config_file}', using default scheduler.")
        use_default_scheduler = True
    if use_default_scheduler:
        self.__init_default_scheduler()
    else:
        # Plugin-based scheduling draws handlers from a dedicated pool.
        self.DEFAULT_BASE_HANDLER_POOLS = ('workflow-schedulers',)
        plugins_element = load(config_file).getroot()
        self.__init_schedulers_for_element(plugins_element)
    if not self.__handlers_configured and self.__stack_has_pool:
        # Stack has a pool for us so override inherited config and use the pool
        self.__init_handlers()
        self.__handlers_configured = True
    elif use_default_scheduler:
        self._set_default_handler_assignment_methods()