def f2h_action(gfile_ids, f2h, galaxy_pass):
    """Send the selected GenericFile rows to a Galaxy history via FTP.

    Args:
        gfile_ids: primary keys of the GenericFile rows to upload.
        f2h: files-to-history object carrying the target history name,
             the tracked Galaxy instance and the requesting user.
        galaxy_pass: the user's Galaxy FTP password (never logged).

    Returns:
        [] when no file paths could be resolved; otherwise None.
    """
    selected_files = GenericFile.objects.filter(pk__in=gfile_ids)
    history_name = f2h.history_name
    git = f2h.galaxyinstancetracking
    user = f2h.added_by

    # fix: original computed files2paths(selected_files) twice; once is enough.
    filelist = files2paths(selected_files)
    if not filelist:
        print('filelist empty')
        return []

    gu = GalaxyUser.objects.get(internal_user=user, galaxyinstancetracking=git)
    gi = GalaxyInstance(git.url, key=gu.api_key)
    gi.verify = False  # skip TLS certificate verification

    # fix: original printed galaxy_pass here — never write credentials to stdout/logs.
    print('ftp_host and port', git.ftp_host, git.ftp_port, gu.email)
    send_to_ftp(filelist, host=git.ftp_host, port=git.ftp_port,
                user=gu.email, password=galaxy_pass)
    uploaded_files, hist = transfer_filelist_from_ftp(
        gi, filelist, history_name=history_name)
    link_files_in_galaxy(uploaded_files, selected_files, git, library=False)
def get_gi_gu(user, git):
    """Look up the user's credentials for a tracked Galaxy server.

    Args:
        user: internal (Django) user object.
        git: GalaxyInstanceTracking row identifying the Galaxy server.

    Returns:
        Tuple of (bioblend GalaxyInstance, GalaxyUser row).
    """
    galaxy_user = GalaxyUser.objects.get(internal_user=user,
                                         galaxyinstancetracking=git)
    instance = GalaxyInstance(git.url, key=galaxy_user.api_key)
    # Disable TLS certificate verification for this client.
    instance.verify = False
    return instance, galaxy_user
def check_galaxy(api_key, galaxy_url):
    """Validate that a Galaxy server is reachable with the given API key.

    Performs a cheap API call (listing workflows) as a connectivity probe.

    Args:
        api_key: Galaxy API key to authenticate with.
        galaxy_url: base URL of the Galaxy server.

    Raises:
        forms.ValidationError: when the workflow listing call fails with
            a ConnectionError.
    """
    gi = GalaxyInstance(galaxy_url, key=api_key)
    gi.verify = False  # skip TLS certificate verification
    wc = WorkflowClient(gi)
    try:
        wc.get_workflows()
    except ConnectionError:  # fix: exception was bound to an unused name
        raise forms.ValidationError(
            'Something is wrong with Galaxy connection, please check')
def run():
    """CLI entry point: pull tool metadata from a Galaxy instance and
    build biotools/bioregistry description files.

    Raises:
        IOError: when the --config_file path does not exist.
        ConnectionError: when the Galaxy server cannot be reached.
    """
    # Silence urllib3 warnings triggered by unverified HTTPS requests.
    requests.packages.urllib3.disable_warnings(
        requests.packages.urllib3.exceptions.InsecureRequestWarning)
    logging.getLogger("requests").setLevel(logging.ERROR)

    parser = argparse.ArgumentParser(
        description="Galaxy instance tool parsing, for integration in "
                    "biotools/bioregistry")
    parser.add_argument("--config_file",
                        help="config.ini file for regate or remag")
    parser.add_argument("--templateconfig", action='store_true',
                        help="generate a config_file template")
    # No arguments at all: show usage and exit with an error status.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()

    if not args.templateconfig:
        if not os.path.exists(args.config_file):
            raise IOError("{0} doesn't exist".format(args.config_file))
        config = Config(args.config_file, "regate")
        if not config.onlypush:
            gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key)
            gi.verify = False
            try:
                TOOLS = gi.tools.get_tools()
            # fix: "except E, e" is Python-2-only syntax; "as" works on 2.6+ and 3.
            except ConnectionError as e:
                raise ConnectionError(
                    "Connection with the Galaxy server {0} failed, {1}".format(
                        config.galaxy_url_api, e))
            tools_meta_data = []
            if config.yaml_file:
                edam_dict = build_edam_dict(config.yaml_file)
            else:
                edam_dict = build_edam_dict(get_data_path('yaml_mapping.yaml'))
            tools_list = config.tools_default.split(',')
            detect_toolid_duplicate(TOOLS)
            for tool in TOOLS:
                if not tool['id'] in tools_list:
                    try:
                        tool_metadata = gi.tools.show_tool(tool_id=tool['id'],
                                                           io_details=True,
                                                           link_details=True)
                        tools_meta_data.append(tool_metadata)
                    except ConnectionError as e:
                        logger.error(
                            "Error during connection with exposed API method for tool {0}"
                            .format(str(tool['id'])), exc_info=True)
            build_biotools_files(tools_meta_data, config, edam_dict)
def set_section_id(ts, repos, url_galaxy_ref):
    """Fill in missing tool-panel section ids on repository dicts.

    For each repo lacking a 'tool_panel_section_id', query the toolshed for
    revision install info and match its valid tools against the tool panel of
    a reference Galaxy instance.

    Args:
        ts: toolshed client exposing repositories.get_repository_revision_install_info.
        repos: repository dicts, each with 'name', 'owner', 'revisions' and
               'tool_panel_section_id' keys.
        url_galaxy_ref: URL of the reference Galaxy instance.

    Returns:
        List of the repo dicts whose section id was resolved here.
    """
    reference = GalaxyInstance(url_galaxy_ref)
    reference.verify = False  # skip TLS certificate verification
    reference_tools = reference.tools.get_tools()

    resolved = []
    for repo in repos:
        for revision in repo['revisions']:
            # Already has a section id (set beforehand or by a prior revision).
            if repo['tool_panel_section_id']:
                continue
            info = ts.repositories.get_repository_revision_install_info(
                repo['name'], repo['owner'], revision)
            if 'valid_tools' not in info[1]:
                continue
            for tool in info[1]['valid_tools']:
                panel = return_panel(tool['guid'], reference_tools)
                if panel:
                    repo['tool_panel_section_id'] = panel[0]
                    repo['tool_panel_section_label'] = panel[1]
                    resolved.append(repo)
                    break
    return resolved
def run():
    """CLI entry point (duplicate variant): pull tool metadata from a Galaxy
    instance and build biotools/bioregistry description files.

    Raises:
        IOError: when the --config_file path does not exist.
        ConnectionError: when the Galaxy server cannot be reached.
    """
    # Silence urllib3 warnings triggered by unverified HTTPS requests.
    requests.packages.urllib3.disable_warnings(
        requests.packages.urllib3.exceptions.InsecureRequestWarning)
    logging.getLogger("requests").setLevel(logging.ERROR)

    parser = argparse.ArgumentParser(
        description="Galaxy instance tool parsing, for integration in "
                    "biotools/bioregistry")
    parser.add_argument("--config_file",
                        help="config.ini file for regate or remag")
    parser.add_argument("--templateconfig", action='store_true',
                        help="generate a config_file template")
    # No arguments at all: show usage and exit with an error status.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = parser.parse_args()

    if not args.templateconfig:
        if not os.path.exists(args.config_file):
            raise IOError("{0} doesn't exist".format(args.config_file))
        config = Config(args.config_file, "regate")
        if not config.onlypush:
            gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key)
            gi.verify = False
            try:
                TOOLS = gi.tools.get_tools()
            # fix: "except E, e" is Python-2-only syntax; "as" works on 2.6+ and 3.
            except ConnectionError as e:
                raise ConnectionError(
                    "Connection with the Galaxy server {0} failed, {1}".format(
                        config.galaxy_url_api, e))
            tools_meta_data = []
            if config.yaml_file:
                edam_dict = build_edam_dict(config.yaml_file)
            else:
                edam_dict = build_edam_dict(get_data_path('yaml_mapping.yaml'))
            tools_list = config.tools_default.split(',')
            detect_toolid_duplicate(TOOLS)
            for tool in TOOLS:
                if not tool['id'] in tools_list:
                    try:
                        tool_metadata = gi.tools.show_tool(tool_id=tool['id'],
                                                           io_details=True,
                                                           link_details=True)
                        tools_meta_data.append(tool_metadata)
                    except ConnectionError as e:
                        logger.error(
                            "Error during connection with exposed API method for tool {0}"
                            .format(str(tool['id'])), exc_info=True)
            build_biotools_files(tools_meta_data, config, edam_dict)
def get_galaxy_workflow_inputs(w, user):
    """Import a stored workflow file into Galaxy and report its data inputs.

    The workflow JSON is renamed with an upload marker and today's date
    before import so it is distinguishable inside Galaxy.

    Args:
        w: workflow model object with workflowfile, galaxyinstancetracking
           and name attributes.
        user: internal user used to look up the Galaxy API key.

    Returns:
        Tuple of (data inputs reported by check_workflow_data_inputs,
        imported workflow id).
    """
    tracking = w.galaxyinstancetracking
    api_key = GalaxyUser.objects.get(
        internal_user=user, galaxyinstancetracking=tracking).api_key

    gi = GalaxyInstance(tracking.url, key=api_key)
    gi.verify = False  # skip TLS certificate verification
    workflow_client = WorkflowClient(gi)

    workflow_json = json.loads(w.workflowfile.read())
    stamp = datetime.datetime.now()
    workflow_json['name'] = '{} dj-upload[{} {}]'.format(
        workflow_json['name'], w.name, stamp.strftime("%Y-%m-%d"))

    imported = workflow_client.import_workflow_json(workflow_json)
    return check_workflow_data_inputs(imported['id'], workflow_client), imported['id']
# Decrypt the connection settings, reset the demo history, then open an FTP
# session for the upload that follows.
# fix: yaml.load() without an explicit Loader is deprecated and can construct
# arbitrary Python objects; safe_load() is correct for plain config data.
conf = yaml.safe_load(fernet.decrypt(enc_conf).decode())
server = conf['server']
rest_protocol = conf['rest_protocol']
rest_port = conf['rest_port']
user = conf['user']
password = conf['password']
ftp_port = int(conf['ftp_port'])
api_key = conf['api_key']

rest_url = '%s://%s:%d' % (rest_protocol, server, rest_port)
history_name = 'bioinf_example'

gi = GalaxyInstance(url=rest_url, key=api_key)
gi.verify = False  # skip TLS certificate verification

histories = gi.histories
print('Existing histories:')
for history in histories.get_histories():
    # Remove any leftover history with the demo name so the run starts clean.
    if history['name'] == history_name:
        histories.delete_history(history['id'])
    print(' - ' + history['name'])
print()
ds_history = histories.create_history(history_name)

print('Uploading file')
ftp = ftplib.FTP()
ftp.connect(host=server, port=ftp_port)
ftp.login(user=user, passwd=password)
parsing, for integration in biotools/bioregistry") parser.add_argument("--config_file", help="config.ini file for regate or remag") parser.add_argument("--templateconfig", action='store_true', help="generate a config_file template") if len(sys.argv) == 1: parser.print_help() sys.exit(1) args = parser.parse_args() if not args.templateconfig: if not os.path.exists(args.config_file): raise IOError("{0} doesn't exist".format(args.config_file)) config = Config(args.config_file, "regate") if not config.onlypush: gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key) gi.verify = False try: TOOLS = gi.tools.get_tools() except ConnectionError, e: raise ConnectionError("Connection with the Galaxy server {0} failed, {1}".format(config.galaxy_url_api, e)) tools_meta_data = [] if config.yaml_file: edam_dict = build_edam_dict(config.yaml_file) else: edam_dict = build_edam_dict(os.path.join('$PREFIXDATA', 'yaml_mapping.yaml')) tools_list = config.tools_default.split(',') for tool in TOOLS: if not tool['id'] in tools_list: