Example No. 1
    def __init__(self, url, key):
        GalaxyInstance.__init__(self, url, key)

        self.galaxy_input_path = ''
        self.library_name = ''  #library to store data folder
        self.MGS_folder = ''  #name folder to store library of MetaGenSense in galaxy
        self.roles = ''
Example No. 2
def f2h_action(gfile_ids, f2h, galaxy_pass):
    selected_files = GenericFile.objects.filter(pk__in=gfile_ids)
    history_name = f2h.history_name
    git = f2h.galaxyinstancetracking
    user = f2h.added_by
    filelist = files2paths(selected_files)

    if not filelist:
        print('filelist empty')
        return []

    gu = GalaxyUser.objects.get(internal_user=user, galaxyinstancetracking=git)
    api_key = gu.api_key
    galaxy_url = git.url
    gi = GalaxyInstance(galaxy_url, key=api_key)
    gi.verify = False

    filelist = files2paths(selected_files)
    print('ftp_host and port', git.ftp_host, git.ftp_port, gu.email,
          galaxy_pass)
    send_to_ftp(filelist,
                host=git.ftp_host,
                port=git.ftp_port,
                user=gu.email,
                password=galaxy_pass)

    uploaded_files, hist = transfer_filelist_from_ftp(
        gi, filelist, history_name=history_name)
    link_files_in_galaxy(uploaded_files, selected_files, git, library=False)
Example No. 3
    def __init__(self, url, key):
        GalaxyInstance.__init__(self, url, key)

        self.galaxy_input_path = ''
        self.library_name = ''  #library to store data folder
        self.MGS_folder = ''  #name folder to store library of MetaGenSense in galaxy
        self.roles = ''
def get_gi_gu(user, git):
    gu = GalaxyUser.objects.get(internal_user=user, galaxyinstancetracking=git)

    galaxy_url = git.url
    gi = GalaxyInstance(galaxy_url, key=gu.api_key)
    gi.verify = False

    return gi, gu
Example No. 5
def check_galaxy(api_key, galaxy_url):
    gi = GalaxyInstance(galaxy_url, key=api_key)
    gi.verify = False
    wc = WorkflowClient(gi)
    try:
        wc.get_workflows()
    except ConnectionError as e:
        raise forms.ValidationError(
            'Something is wrong with Galaxy connection, please check')
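The check above goes through a standalone WorkflowClient, but GalaxyInstance already exposes the same client as gi.workflows, so the probe can be written without the extra import. A minimal sketch, assuming a placeholder URL and API key:

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance('https://galaxy.example.org', key='my-api-key')  # placeholder values
gi.verify = False
gi.workflows.get_workflows()  # raises bioblend.ConnectionError if the server or key is invalid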
Example No. 6
    def __init__(self, url, user):
        u = urlsplit(url)
        if u.scheme != 'http' and u.scheme != 'https':
            raise ValueError("Invalid name node address")

        self.url = urlunparse((u.scheme, u.netloc, '', '', '', ''))
        self.localdir = ""
        self.prefix = 'GalaxyFS'
        self.lddaprefix = 'Libraries'
        self.hdaprefix = 'Histories'
        self.client = GalaxyInstance(self.url, user)
Example No. 7
def run():
    requests.packages.urllib3.disable_warnings(
        requests.packages.urllib3.exceptions.InsecureRequestWarning)
    logging.getLogger("requests").setLevel(logging.ERROR)
    parser = argparse.ArgumentParser(description="Galaxy instance tool\
        parsing, for integration in biotools/bioregistry")
    parser.add_argument("--config_file",
                        help="config.ini file for regate or remag")
    parser.add_argument("--templateconfig",
                        action='store_true',
                        help="generate a config_file template")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    if not args.templateconfig:
        if not os.path.exists(args.config_file):
            raise IOError("{0} doesn't exist".format(args.config_file))
        config = Config(args.config_file, "regate")
        if not config.onlypush:
            gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key)
            gi.verify = False
            try:
                TOOLS = gi.tools.get_tools()
            except ConnectionError as e:
                raise ConnectionError(
                    "Connection with the Galaxy server {0} failed, {1}".format(
                        config.galaxy_url_api, e))

            tools_meta_data = []
            if config.yaml_file:
                edam_dict = build_edam_dict(config.yaml_file)
            else:
                edam_dict = build_edam_dict(get_data_path('yaml_mapping.yaml'))
            tools_list = config.tools_default.split(',')
            detect_toolid_duplicate(TOOLS)
            for tool in TOOLS:
                if not tool['id'] in tools_list:
                    try:
                        tool_metadata = gi.tools.show_tool(tool_id=tool['id'],
                                                           io_details=True,
                                                           link_details=True)
                        tools_meta_data.append(tool_metadata)
                    except ConnectionError as e:
                        logger.error(
                            "Error during connection with exposed API method for tool {0}"
                            .format(str(tool['id'])),
                            exc_info=True)
            build_biotools_files(tools_meta_data, config, edam_dict)
Example No. 8
    def read_galaxy_history(self):
        """
        Read Galaxy's current history and inherit all the tags from a parent
        to a child history item
        """
        # connect to running Galaxy's instance
        g_instance = GalaxyInstance(self.galaxy_url, self.galaxy_api_key,
                                    self.history_id)
        history = g_instance.histories
        job = g_instance.jobs
        # if the history id is not supplied, then update tags for the most recently used history
        if self.history_id is None:
            update_history = history.get_most_recently_used_history()
        else:
            try:
                update_history = history.show_history(self.history_id)
            except Exception as exception:
                print("Some problem occurred with history: %s" %
                      self.history_id)
                print(exception)
                return
        update_history_id = update_history["id"]
        print("History name: %s" % update_history["name"])
        print("History id: %s" % update_history_id)
        self.find_dataset_parents_update_tags(history, job, update_history_id)
Example No. 9
def update_file(finfo, sample_info, config):
    """Update file in Galaxy data libraries.
    """
    if GalaxyInstance is None:
        raise ImportError("Could not import bioblend.galaxy")
    if "dir" not in config:
        raise ValueError("Galaxy upload requires `dir` parameter in config specifying the "
                         "shared filesystem path to move files to.")
    folder_name = "%s_%s" % (config["fc_date"], config["fc_name"])
    storage_dir = utils.safe_makedir(os.path.join(config["dir"], folder_name))
    if finfo.get("type") == "directory":
        storage_file = None
        if finfo.get("ext") == "qc":
            pdf_file = qcsummary.prep_pdf(finfo["path"], config)
            if pdf_file:
                finfo["path"] = pdf_file
                finfo["type"] = "pdf"
                storage_file = filesystem.copy_finfo(finfo, storage_dir, pass_uptodate=True)
    else:
        storage_file = filesystem.copy_finfo(finfo, storage_dir, pass_uptodate=True)
    if "galaxy_url" in config and "galaxy_api_key" in config:
        galaxy_url = config["galaxy_url"]
        if not galaxy_url.endswith("/"):
            galaxy_url += "/"
        gi = GalaxyInstance(galaxy_url, config["galaxy_api_key"])
    else:
        raise ValueError("Galaxy upload requires `galaxy_url` and `galaxy_api_key` in config")
    if storage_file and sample_info and not finfo.get("index", False):
        _to_datalibrary_safe(storage_file, gi, folder_name, sample_info, config)
Example No. 10
    def _runworkflow(hid, dsid):
        ''' 
        http://61.50.134.132/runworkflow/?key=3c0ce871b56dbe1dd6b745144fd323bf&wid=c6ef01d8a6d43836 
        Use [email protected]'s api-key "3c0ce871b56dbe1dd6b745144fd323bf" 
        '''
        # wid = c6ef01d8a6d43836
        # ldda = d368c393b367e434
        # hda = 3608c6e62163f50a
        wname = wnameList[start_stage]

        try:
            gi = GalaxyInstance(settings.GALAXY_URL, key=apikey)
            workflows = gi.workflows.get_workflows(name=wname)
            wid = workflows[0]['id']
            wf = gi.workflows.show_workflow(wid)
            #result = json.dumps(wf, cls=DjangoJSONEncoder)
            #return HttpResponse(result)

            datamap = {}
            for step_id in wf['inputs']:
                datamap[step_id] = {'src': 'hda', 'id': dsid}
            res = gi.workflows.run_workflow(wid, datamap, history_id=hid)
            print(res)
        except:
            print('error when _runworkflow')
            return 0
Example No. 11
def get_galaxy_instance(galaxy_url=None, galaxy_api_key=None):
    """
    Private utility function to instantiate and configure a :class:`bioblend.GalaxyInstance`
    :type galaxy_url: str
    :param galaxy_url: the URL of the Galaxy server
    :type galaxy_api_key: str
    :param galaxy_api_key: a registered Galaxy API KEY
    :rtype: :class:`bioblend.GalaxyInstance`
    :return: a new :class:`bioblend.GalaxyInstance` instance
    """
    # configure `galaxy_url`
    if galaxy_url is None:
        if ENV_KEY_GALAXY_URL not in _os.environ:
            raise RuntimeError(
                "Galaxy URL not defined!  Use --server or the environment variable {} "
                "or specify it in the test configuration".format(
                    ENV_KEY_GALAXY_URL))
        else:
            galaxy_url = _os.environ[ENV_KEY_GALAXY_URL]

    # configure `galaxy_api_key`
    if galaxy_api_key is None:
        if ENV_KEY_GALAXY_API_KEY not in _os.environ:
            raise RuntimeError(
                "Galaxy API key not defined!  Use --api-key or the environment variable {} "
                "or specify it in the test configuration".format(
                    ENV_KEY_GALAXY_API_KEY))
        else:
            galaxy_api_key = _os.environ[ENV_KEY_GALAXY_API_KEY]

    # initialize the galaxy instance
    return GalaxyInstance(galaxy_url, galaxy_api_key)
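A possible call site for the helper above; the URL and key are placeholder values, and passing them explicitly skips the environment-variable fallback:

gi = get_galaxy_instance("https://galaxy.example.org", "my-api-key")  # placeholder values
print(gi.histories.get_histories())  # any API call confirms the instance is reachable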
Example No. 12
def __main__():
    parser = argparse.ArgumentParser(description = 'Import workflows from a local directory')
    parser.add_argument('-p', '--port', help = 'port number from your docker container that you map to your host machine. The default is 80', default = '80')
    parser.add_argument('-k', '--key', help = 'user API key')
    args = parser.parse_args()
    
    # port and api key
    port = args.port
    api_key = args.key
    
    # galaxy client instance
    galaxy_home = environ['GALAXY_HOME']
    galaxy_client = GalaxyInstance(url = "http://127.0.0.1:" + port, key = api_key)
    
    if galaxy_client:
        # workflow client instance
        workflow_client = WorkflowClient(galaxy_client)
        
        my_workflows_dir = environ['GALAXY_HOME'] + '/my_workflows'
        workflow_files = []
        for f in listdir(my_workflows_dir):
            if isfile(join(my_workflows_dir, f)):
                f_path = join(my_workflows_dir, f)
                workflow_client.import_workflow_from_local_path(f_path)
                print('Imported workflow: ' + f)
Example No. 13
    def run(self):
        try:
            self.load()  # Load from config
            self.gi = GalaxyInstance(self.IP, key=self.API_KEY)

            if not self.manual and self.reference is None:
                # No reference and it isn't using files in upload folder
                self.t.time_print(
                    "No reference file specified with -r, please input one or use the --manual"
                    " flag to use a reference file that you put in the upload folder."
                )
                exit(1)

            if self.noextract and not self.manual:
                self.t.time_print(
                    "[Warning] Using manual flag since noextract was specified without manual."
                )
                self.manual = True

            return self.main()  # Return the path to the results zip
        except:
            import traceback

            # Print error to file
            self.t.time_print("[Error Dump]\n" + traceback.format_exc())
            raise
def main():
    parser = argparse.ArgumentParser(
        description=
        'Writes True to stdout if a tool/owner combination does not exist on a Galaxy instance'
    )
    parser.add_argument('-g', '--galaxy_url', help='Galaxy server URL')
    parser.add_argument('-a', '--api_key', help='API key for galaxy server')
    parser.add_argument('-n', '--name', help='Tool name')
    parser.add_argument('-o', '--owner', help='Tool owner')

    args = parser.parse_args()
    galaxy_url = args.galaxy_url
    api_key = args.api_key
    name = args.name
    owner = args.owner

    gal = GalaxyInstance(galaxy_url, api_key)
    cli = ToolShedClient(gal)
    u_repos = cli.get_repositories()
    tools_with_name_and_owner = [
        t for t in u_repos if t['name'] == name and t['owner'] == owner
        and t['status'] == 'Installed'
    ]
    if not tools_with_name_and_owner:
        sys.stdout.write(
            'True'
        )  # we did not find the name/owner combination so we say that the tool is new
    else:
        sys.stdout.write('False')
Example No. 15
    def test_missing_scheme_real_url(self):
        galaxy_url = os.environ['BIOBLEND_GALAXY_URL']
        # Strip the scheme from galaxy_url
        scheme_sep = '://'
        if scheme_sep in galaxy_url:
            galaxy_url = galaxy_url.partition(scheme_sep)[2]
        GalaxyInstance(url=galaxy_url)
Example No. 16
def _galaxy_library_upload(finfo, sample_info, config):
    """Upload results to galaxy library.
    """
    folder_name = "%s_%s" % (config["fc_date"], config["fc_name"])
    storage_dir = utils.safe_makedir(os.path.join(config["dir"], folder_name))
    if finfo.get("type") == "directory":
        storage_file = None
        if finfo.get("ext") == "qc":
            pdf_file = qcsummary.prep_pdf(finfo["path"], config)
            if pdf_file:
                finfo["path"] = pdf_file
                finfo["type"] = "pdf"
                storage_file = filesystem.copy_finfo(finfo,
                                                     storage_dir,
                                                     pass_uptodate=True)
    else:
        storage_file = filesystem.copy_finfo(finfo,
                                             storage_dir,
                                             pass_uptodate=True)
    if "galaxy_url" in config and "galaxy_api_key" in config:
        galaxy_url = config["galaxy_url"]
        if not galaxy_url.endswith("/"):
            galaxy_url += "/"
        gi = GalaxyInstance(galaxy_url, config["galaxy_api_key"])
    else:
        raise ValueError(
            "Galaxy upload requires `galaxy_url` and `galaxy_api_key` in config"
        )
    if storage_file and sample_info and not finfo.get(
            "index", False) and not finfo.get("plus", False):
        _to_datalibrary_safe(storage_file, gi, folder_name, sample_info,
                             config)
Example No. 17
    def add_instance(cls, url):

        instance = Instance.query.filter_by(url=url).first()
        if instance is None:
            instance = Instance(url=url)
            db.session.add(instance)

        try:
            galaxy_instance = GalaxyInstance(url=url)
            instance_config = galaxy_instance.config.get_config()

            instance.update_date = datetime.now()
            instance.allow_user_creation = instance_config[
                'allow_user_creation']
            instance.brand = instance_config['brand']
            instance.enable_quotas = 'enable_quotas' in instance_config and instance_config[
                'enable_quotas']
            instance.require_login = 'require_login' in instance_config and instance_config[
                'require_login']
            instance.terms_url = instance_config['terms_url']
            instance.version = instance_config['version_major']

            url_data = urlparse(url)
            try:
                instance_location = requests.get('http://ip-api.com/json/%s' %
                                                 url_data.netloc)
            except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
                print("Unable to get location data for %s" % url_data.netloc)
            else:
Example No. 18
def runworkflow(req):
    ''' 
    http://61.50.134.132/runworkflow/?key=3c0ce871b56dbe1dd6b745144fd323bf&wid=c6ef01d8a6d43836 
    Use [email protected]'s api-key "3c0ce871b56dbe1dd6b745144fd323bf" 
    '''
    # wid = c6ef01d8a6d43836
    # ldda = d368c393b367e434
    # hda = 3608c6e62163f50a
    wname = req.POST['wid']
    apikey = '3c0ce871b56dbe1dd6b745144fd323bf'  #req.POST['key']
    try:
        gi = GalaxyInstance(settings.GALAXY_URL, key=apikey)
        workflows = gi.workflows.get_workflows(name=wname)
        wid = workflows[0]['id']
        wf = gi.workflows.show_workflow(wid)
        result = json.dumps(wf, cls=DjangoJSONEncoder)
        return HttpResponse(result)

        datamap = {}
        for step_id in wf['inputs']:
            datamap[step_id] = {'src': 'ld', 'id': 'd368c393b367e434'}
        res = gi.workflows.run_workflow(wid,
                                        datamap,
                                        history_name='New output history')

        result = json.dumps(res, cls=DjangoJSONEncoder)
        return HttpResponse(result)

    except:
        return HttpResponse([])
Example No. 19
def connectgalaxy(apikey, galaxyurl):
    """
    @param apikey:
    @param galaxyurl:
    returns an object galaxyinstance
    """
    return GalaxyInstance(url=galaxyurl, key=apikey)
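A minimal usage sketch; note the argument order (API key first, then URL), and both values here are placeholders:

gi = connectgalaxy('my-api-key', 'https://galaxy.example.org')  # placeholder values
print(gi.histories.get_histories())  # list the current user's histories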
Example No. 20
def start_container(**kwargs):
    """Starts a docker container with the galaxy image. Returns a named tuple with the url, a GalaxyInstance object,
    the container attributes, and the container itself."""
    # We start a container from the galaxy image. We detach it. Port 80 is exposed to the host at a random port.
    # The random port is because we need mac compatibility. On GNU/linux a better option would be not to expose it
    # and use the internal ip address instead.
    # But alas, the trappings of a proprietary BSD kernel compel us to do ugly workarounds.

    container = client.containers.run(GALAXY_IMAGE,
                                      detach=True,
                                      ports={'80/tcp': None},
                                      **kwargs)
    container_id = container.attrs.get('Id')
    print(container_id)

    # This seems weird as we also can just get container.attrs but for some reason
    # the network settings are not loaded in container.attrs. With the get request
    # these attributes are loaded
    container_attributes = client.containers.get(container_id).attrs

    # Venturing into deep nested dictionaries.
    exposed_port = container_attributes.get('NetworkSettings').get(
        'Ports').get('80/tcp')[0].get('HostPort')

    container_url = "http://localhost:{0}".format(exposed_port)
    galaxy_wait(
        container_url, timeout=60
    )  # We are only going to wait 60 seconds. These are tests, and we are impatient!
    yield GalaxyContainer(url=container_url,
                          container=container,
                          attributes=container_attributes,
                          gi=GalaxyInstance(container_url, key="admin"))
    container.remove(force=True)
Example No. 21
def main():
    galaxyInstance = GalaxyInstance(url=GALAXY_URL, key=API_KEY)
    toolClient = ToolClient(galaxyInstance)
    histories = HistoryClient(galaxyInstance)
    workflowsClient = WorkflowClient(galaxyInstance)
    libraryClient = LibraryClient(galaxyInstance)

    brassica_library = libraryClient.get_libraries(
        name=' Evolutionary Systems Biology')
    files = libraryClient.show_library(brassica_library[0]['id'],
                                       contents=True)
    #print(files)
    itemp = 0
    for f in files:
        if f['type'] == 'folder':
            continue  # do nothing, try next
        #initial set
        #if itemp == 31:
        #	break

        #print ("Name " + f['name'])

        replicate = f['name'].split('_')[-1].split('.')[0]
        #print replicate
        if replicate == '1':
            itemp = itemp + 1
            if not (itemp >= 71 and itemp <= 92):
                continue
            base = f['name'].split('_')[:-1]
            #print base
            forward_name = f['name']
            reverse_name = '_'.join(base) + '_2.fastq.bz2'
            forward_id = f['id']
            files2 = libraryClient.show_library(brassica_library[0]['id'],
                                                contents=True)
            for f2 in files2:
                if f2['name'] == reverse_name:
                    reverse_id = f2['id']
            print(forward_name)
            print(reverse_name)
            new_history_name = f['name'].split('_')[7] + "_" + f['name'].split(
                '_')[-3] + "_" + f['name'].split('_')[-2]
            print(new_history_name)
            hist = histories.create_history(name=new_history_name)
            dataset_F = histories.upload_dataset_from_library(
                hist['id'], forward_id)
            dataset_R = histories.upload_dataset_from_library(
                hist['id'], reverse_id)
            datamap = {}
            datamap['0'] = {'src': 'hda', 'id': dataset_F['id']}
            datamap['1'] = {'src': 'hda', 'id': dataset_R['id']}
            workflows = workflowsClient.get_workflows(name="Maize HISAT 2.1")
            workflow = workflows[0]
            try:
                w = workflowsClient.run_workflow(workflow['id'],
                                                 datamap,
                                                 history_id=hist['id'])
            except:
                print('Next')
Example No. 22
def implementation(logger, args):
    list_of_files = {}
    if path_exists(args.sg_local_path, logger=logger, force=True):
        for (dirpath, dirnames, filenames) in os.walk(args.sg_local_path):
            for filename in filenames:
                list_of_files[filename] = os.sep.join([dirpath, filename])
        logger.debug(list_of_files)

    gi = GalaxyInstance(args.url, key=args.key)
    tools = gi.tools.get_tools()

    counter_singularity = 0
    counter_docker = 0
    match = {}
    unmatch = []

    for t in tools:
        t_id = t['id']
        t_xml_file = gi.tools.show_tool(t['id'])['config_file']

        container_name = None
        try:
            tool_xml = load(t_xml_file)
            requirements, containers = parse_requirements_from_xml(tool_xml)
            conda_targets = requirements_to_conda_targets(requirements)
            mulled_targets = [
                build_target(c.package, c.version) for c in conda_targets
            ]
            container_name = mulled_container_name("biocontainers",
                                                   mulled_targets)
        except Exception as ex:
            logger.exception('Caught an error at {} with tid: {}'.format(
                args.url, t_id))
            pass

        singularity = 'not_found'
        if container_name:
            container_name = container_name.lower()
            counter_docker += 1
            if os.path.basename(container_name) in list_of_files:
                singularity = os.path.join(args.sg_local_path,
                                           os.path.basename(container_name))
                counter_singularity += 1

            match[t_id] = {
                'docker': "docker://{}".format(container_name),
                'singularity': singularity
            }
        else:
            unmatch.append(t_id)
        print(t_id, container_name, singularity)
    dump(match, "{}_{}".format(args.url.split('/')[2], args.matched))
    dump(unmatch, "{}_{}".format(args.url.split('/')[2], args.notmatched))

    print("number of tools {}".format(len(tools)))
    print("number of docker images matched {}".format(counter_docker))
    print("number of singularity images in CVMFS {}".format(
        len(list_of_files)))
    print(
        "number of singularity images matched {}".format(counter_singularity))
Example No. 23
def get_gi(api_key, galaxy_url='http://192.168.2.218'):
    # api_key = get_galaxy_api_key(email)
    if api_key is not None:
        # 192.168.2.218 is ctbgx.sanbi.ac.za
        gi = GalaxyInstance(url=galaxy_url, key=api_key)
    else:
        gi = None
    return gi
Example No. 24
def api_gal_instance(url, key, verify_cert=True):
    ## catch_warnings is not thread-safe
    with warnings.catch_warnings():
        if not verify_cert:
            import urllib3
            warnings.filterwarnings(
                "ignore", category=urllib3.exceptions.InsecureRequestWarning)
        return GalaxyInstance(url=url, key=key, verify=verify_cert)
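A usage sketch for a server with a self-signed certificate; the URL and key are placeholders. Only the InsecureRequestWarning is suppressed while the instance is built, and the returned GalaxyInstance keeps certificate verification disabled:

gi = api_gal_instance('https://galaxy.example.org', 'my-api-key', verify_cert=False)  # placeholder values
print(gi.config.get_version())  # reports the server version if the connection works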
Example No. 25
def main():
    log.basicConfig(stream=sys.stdout,
                    level=log.INFO,
                    format='%(asctime)s - %(message)s')

    short_options = "hg:a:l:d:m:"
    long_options = [
        "help", "galaxy=", "api_key=", "library_name=", "description=",
        "manifest="
    ]
    # The following assumes all but the first option listed in long_options is required
    required_options = RequiredOptions(
        [o.rstrip('=') for o in long_options[1:]])
    try:
        arguments, values = getopt.getopt(sys.argv[1:], short_options,
                                          long_options)
    except getopt.error as err:
        log.error("Error parsing arguments: %s" % err)
        sys.exit(2)
    for current_argument, current_value in arguments:
        if current_argument in ("-g", "--galaxy"):
            galaxy = current_value
            required_options.resolve('galaxy')
        elif current_argument in ("-a", "--api_key"):
            api_key = current_value
            required_options.resolve('api_key')
        elif current_argument in ("-l", "--library_name"):
            lib_name = current_value
            required_options.resolve('library_name')
        elif current_argument in ("-d", "--library_description"):
            lib_description = current_value
            required_options.resolve('description')
        elif current_argument in ("-m", "--manifest"):
            data_manifest = current_value
            required_options.resolve('manifest')
        elif current_argument in ("-h", "--help"):
            print_help()
            sys.exit(0)
    # Verify that all of the required options have been specified
    if not required_options.optionsResolved():
        print("Required option(s) missing: " +
              ', '.join(required_options.required_options))
        print_help()
        sys.exit(1)

    if is_url(data_manifest):
        r = requests.get(data_manifest)
        data_manifest = 'dm.yaml'
        with open(data_manifest, 'wb') as f:
            f.write(r.content)
    with open(data_manifest) as f:
        data_manifest = yaml.safe_load(f)
    gi = GalaxyInstance(galaxy, api_key)

    lib = create_library(gi, lib_name, lib_description)
    upload_data(gi, lib, data_manifest)
Example No. 26
def uninstall_tools(galaxy_server, api_key, names, force):
    galaxy_instance = GalaxyInstance(url=galaxy_server, key=api_key)
    toolshed_client = ToolShedClient(galaxy_instance)

    temp_tool_list_file = 'tmp/installed_tool_list.yml'
    # TODO: Switch to using bioblend to obtain this list
    # ephemeris uses bioblend but without using ephemeris we cut out the need to for a temp file
    os.system('get-tool-list -g %s -a %s -o %s --get_all_tools' %
              (galaxy_server, api_key, temp_tool_list_file))

    tools_to_uninstall = []
    with open(temp_tool_list_file) as tool_file:
        installed_tools = yaml.safe_load(tool_file.read())['tools']
    if not installed_tools:
        raise Exception('No tools to uninstall')
    os.system('rm %s' % temp_tool_list_file)

    for name in names:
        revision = None
        if '@' in name:
            (name, revision) = name.split('@')
        matching_tools = [
            t for t in installed_tools if t['name'] == name and (
                not revision or revision in t['revisions'])
        ]
        if len(matching_tools) == 0:
            id_string = 'name %s revision %s' % (
                name, revision) if revision else 'name %s' % name
            sys.stderr.write('*** Warning: No tool with %s\n' % id_string)
        elif len(matching_tools) > 1 and not force:
            sys.stderr.write(
                '*** Warning: More than one toolshed tool found for %s.  ' %
                name +
                'Not uninstalling any of these tools.  Run script with --force (-f) flag to uninstall anyway\n'
            )
        else:  # Either there is only one matching tool for the name and revision, or there are many and force=True
            for tool in matching_tools:
                tool_copy = tool.copy()
                if revision:
                    tool_copy['revisions'] = [revision]
                tools_to_uninstall.append(tool_copy)

    for tool in tools_to_uninstall:
        try:
            name = tool['name']
            owner = tool['owner']
            tool_shed_url = tool['tool_shed_url']
            revision = tool['revisions'][0]
            sys.stderr.write('Uninstalling %s at revision %s\n' %
                             (name, revision))
            return_value = toolshed_client.uninstall_repository_revision(
                name=name,
                owner=owner,
                changeset_revision=revision,
                tool_shed_url=tool_shed_url)
            sys.stderr.write(str(return_value) + '\n')
        except KeyError as e:
            sys.stderr.write(str(e) + '\n')
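A hedged invocation sketch for the function above; the server URL, API key, and tool names are placeholders, and the optional @revision suffix pins a single changeset, matching the parsing in the loop:

uninstall_tools('https://galaxy.example.org',      # placeholder server URL
                'my-api-key',                      # placeholder API key
                ['bwa', 'fastqc@abc123def456'],    # tool names, optionally pinned with @revision
                force=False)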
Example No. 27
def check_histories(run, api_key, host, logger):
    galaxy_instance = GalaxyInstance(host, key=api_key)
    history_client = HistoryClient(galaxy_instance)
    history_json_d = run + '/output'
    histories = read_all_histories(history_json_d, logger)
    (all_successful, all_running, all_failed, all_except, all_waiting,
     upload_history) = get_history_status(histories, history_client, logger)
    return (all_successful, all_running, all_failed, all_except, all_waiting,
            upload_history)
Example No. 28
def set_section_id(ts, repos, url_galaxy_ref):
    gi = GalaxyInstance(url_galaxy_ref)
    gi.verify = False
    tools = gi.tools.get_tools()
    clean_repos = []
    for repo in repos:
        for revision in repo['revisions']:
            if not repo['tool_panel_section_id']:
                revision_info = ts.repositories.get_repository_revision_install_info(repo['name'], repo['owner'], revision)
                if 'valid_tools' in revision_info[1]:
                    for tool in revision_info[1]['valid_tools']:
                        panel_info = return_panel(tool['guid'], tools)
                        if panel_info:
                            repo['tool_panel_section_id'] = panel_info[0]
                            repo['tool_panel_section_label'] = panel_info[1]
                            clean_repos.append(repo)
                            break
    return clean_repos
Example No. 29
    def upload_query_into_galaxy(self):
        """Upload a json query into galaxy"""
        if not self.galaxy_history:
            self.init_galaxy()
        file_path = "tests/data/graphState_simple_query.json"
        filename = "graphstate.json"

        galaxy = GalaxyInstance(self.gurl, self.gkey)
        return galaxy.tools.upload_file(file_path, self.galaxy_history['id'], file_name=filename, file_type='json')
Example No. 30
    def impersonate(self, user=None, user_key=None) -> GalaxyInstance:
        """
        Returns a GalaxyInstance for the given user_key. If user is provided,
        user_key is fetched from Galaxy.
        """
        if user is not None:
            user_id = self.instance.users.get_users(f_name=user)[0]["id"]
            user_key = self.instance.users.get_user_apikey(user_id)
        return GalaxyInstance(self.url, key=user_key)
Example No. 31
	def set_user_api(self):
		"""
		Note: error message tacked on to self.data_stores for display back to user.
		"""
		self.user_api = GalaxyInstance(url=self.api_url, key=self.user_api_key)

		if not self.user_api:
			self.data_stores.append({'name':'Error: user Galaxy API connection was not set up correctly.  Try getting another user API key.', 'id':'none'})
			return
Example No. 32
    def upload_dataset_into_galaxy(self):
        """Upload a dataset into galaxy"""
        if not self.galaxy_history:
            self.init_galaxy()
        file_path = "test-data/transcripts.tsv"
        filename = "transcripts.tsv"

        galaxy = GalaxyInstance(self.gurl, self.gkey)
        return galaxy.tools.upload_file(file_path, self.galaxy_history['id'], file_name=filename, file_type='tabular')
Example No. 33
def run():
    requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
    logging.getLogger("requests").setLevel(logging.ERROR)
    parser = argparse.ArgumentParser(description="Galaxy instance tool\
        parsing, for integration in biotools/bioregistry")
    parser.add_argument("--config_file", help="config.ini file for regate or remag")
    parser.add_argument("--templateconfig", action='store_true', help="generate a config_file template")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    if not args.templateconfig:
        if not os.path.exists(args.config_file):
            raise IOError("{0} doesn't exist".format(args.config_file))
        config = Config(args.config_file, "regate")
        if not config.onlypush:
            gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key)
            gi.verify = False
            try:
                TOOLS = gi.tools.get_tools()
            except ConnectionError as e:
                raise ConnectionError("Connection with the Galaxy server {0} failed, {1}".format(config.galaxy_url_api,
                                                                                                 e))

            tools_meta_data = []
            if config.yaml_file:
                edam_dict = build_edam_dict(config.yaml_file)
            else:
                edam_dict = build_edam_dict(get_data_path('yaml_mapping.yaml'))
            tools_list = config.tools_default.split(',')
            detect_toolid_duplicate(TOOLS)
            for tool in TOOLS:
                if not tool['id'] in tools_list:
                    try:
                        tool_metadata = gi.tools.show_tool(tool_id=tool['id'], io_details=True, link_details=True)
                        tools_meta_data.append(tool_metadata)
                    except ConnectionError as e:
                        logger.error(
                            "Error during connection with exposed API method for tool {0}".format(str(tool['id'])),
                            exc_info=True)
            build_biotools_files(tools_meta_data, config, edam_dict)
def main( options ):

    # read from config
    config = ConfigParser.ConfigParser()
    config.read(options.config)

    fetch_tool = config.get('main', 'genome_fetch_tool')
    url = "http://localhost"
    sleep_time = float(config.get('main', 'sleep_time'))

    # The environment variables are set by the parent container
    admin_email = os.environ.get('GALAXY_DEFAULT_ADMIN_USER', '*****@*****.**')
    admin_pass = os.environ.get('GALAXY_DEFAULT_ADMIN_PASSWORD', 'admin')
    
    genomes = config.get('genomes', 'ids').strip().split('\n')
    index_tools = config.get('build_indexers', 'ids').strip().split('\n')

    # should test for valid config options
    # establish connection to galaxy instance
    gi = GalaxyInstance(url=url, email=admin_email, password=admin_pass)

    # should test valid connection
    log.info("List of valid histories: %s" % gi.histories.get_histories())

    # fetch genomes
    dbkeys = dict()
    for dbkey in genomes:
        if dbkey not in dbkeys:
            tool_inputs={'dbkey':dbkey, 'reference_source|reference_source_selector': 'ucsc', 'reference_source|requested_dbkey': dbkey }
            log.info("Fetching dbkey: %s" % dbkey)

            try:
                dbkeys[ dbkey ] = gi.tools.run_tool(history_id=None, tool_id=fetch_tool, tool_inputs=tool_inputs)

            except ConnectionError as inst:
                if '\"dbkey\": \"An invalid option was selected' in inst.body:
                    log.error("Galaxy instance does not recognize genome key: %s" % dbkey)
                raise
        else:
            log.info("The dbkey (%s) was specified more than once, skipping additional specification." % ( dbkey ))

    gi.make_get_request(urlparse.urljoin(url,'api/tool_data/all_fasta/reload'))

    # start indexers
    log.info("Start building genome indices.")
    indexing_tools = []
    while dbkeys:
        for dbkey, value in dbkeys.items():
            if gi.datasets.show_dataset( value['outputs'][0]['id'] )['state'] in ['ok', 'error']:

                # refresh the tool data tables
                log.info("Refreshing tool-data tables.")
                log.info( gi.make_get_request(urlparse.urljoin(url,'api/tool_data/all_fasta/reload')).text )
                time.sleep(2)
                for tool_id in index_tools:
                    log.info("Indexing %s with %s." % (dbkey, tool_id) )
                    try:
                        indexing_tools.append(gi.tools.run_tool(history_id=None, tool_id=tool_id, tool_inputs={ 'all_fasta_source':dbkey }))

                    except ConnectionError as inst:
                        if '\"text\": \"no tool\",' in inst.body:
                            log.info("The tool %s cannot be located. Please check it is installed in your galaxy instance." % (tool_id) )
                        else:
                            raise

                del dbkeys[ dbkey ]
        if dbkeys:
            time.sleep(sleep_time)
            log.info(".")

    # Wait for indexers to finish
    while indexing_tools:
        for i, indexing_tool_value in enumerate( indexing_tools ):
            if gi.datasets.show_dataset( indexing_tool_value['outputs'][0]['id'] )['state'] in ['ok', 'error']:
                log.info('Finished %s.' % indexing_tool_value)
                del indexing_tools[i]
                break
        if indexing_tools:
            time.sleep(sleep_time)

    log.info('All indexers have been run, please check results.')
Example No. 35
    parser = argparse.ArgumentParser(description="Galaxy instance tool\
        parsing, for integration in biotools/bioregistry")
    parser.add_argument("--config_file", help="config.ini file for regate or remag")
    parser.add_argument("--templateconfig", action='store_true', help="generate a config_file template")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()
    if not args.templateconfig:
        if not os.path.exists(args.config_file):
            raise IOError("{0} doesn't exist".format(args.config_file))
        config = Config(args.config_file, "regate")
        if not config.onlypush:
            gi = GalaxyInstance(config.galaxy_url_api, key=config.api_key)
            gi.verify = False
            try:
                TOOLS = gi.tools.get_tools()
            except ConnectionError as e:
                raise ConnectionError("Connection with the Galaxy server {0} failed, {1}".format(config.galaxy_url_api,
                                                                                                 e))

            tools_meta_data = []
            if config.yaml_file:
                edam_dict = build_edam_dict(config.yaml_file)
            else:
                edam_dict = build_edam_dict(os.path.join('$PREFIXDATA', 'yaml_mapping.yaml'))

            tools_list = config.tools_default.split(',')
            for tool in TOOLS: