def delete_dataset_collection(self, history_id, dataset_collection_id):
    """
    Mark corresponding dataset collection as deleted.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_collection_id: str
    :param dataset_collection_id: Encoded dataset collection ID
    """
    url = self.gi._make_url(self, history_id, contents=True)
    # Append the dataset_collection_id to the base history contents URL
    url = '/'.join([url, "dataset_collections", dataset_collection_id])
    # DELETE needs no body; drop the superfluous empty payload for
    # consistency with the other dataset-collection methods in this file.
    Client._delete(self, url=url)
def delete_dataset_collection(self, history_id, dataset_collection_id):
    """
    Mark the given history dataset collection as deleted.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_collection_id: str
    :param dataset_collection_id: Encoded dataset collection ID
    """
    # Build <history contents URL>/dataset_collections/<collection id>.
    base = self.gi._make_url(self, history_id, contents=True)
    target = '/'.join([base, "dataset_collections", dataset_collection_id])
    Client._delete(self, url=target)
def delete_dataset(self, history_id, dataset_id):
    """
    Mark corresponding dataset as deleted.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_id: str
    :param dataset_id: Encoded dataset ID
    """
    # The dataset is addressed under the history's contents collection.
    contents_url = self.gi._make_url(self, history_id, contents=True)
    Client._delete(self, payload={}, url="/".join([contents_url, dataset_id]))
def show_history(self, history_id, contents=False):
    """
    Get details of a given history.

    By default only the history meta information is returned; pass
    ``contents=True`` to get the complete list of datasets in the history.

    :type history_id: str
    :param history_id: Encoded history ID

    :type contents: bool
    :param contents: when ``True``, fetch the history's datasets instead of
      just its meta information
    """
    return Client._get(self, id=history_id, contents=contents)
def show_genome(self, id, num=None, chrom=None, low=None, high=None):
    """
    Returns information about build <id>

    :type id: str
    :param id: Genome build ID to use

    :type num: str
    :param num: num

    :type chrom: str
    :param chrom: chrom

    :type low: str
    :param low: low

    :type high: str
    :param high: high
    """
    # Only include the optional filters that were actually supplied.
    params = {}
    if num:
        params["num"] = num
    if chrom:
        params["chrom"] = chrom
    if low:
        params["low"] = low
    if high:
        params["high"] = high
    # Pass arguments by keyword: Client._get has several optional
    # parameters, so a bare positional `params` could bind to the wrong
    # one (every other call site in this file uses keywords).
    return Client._get(self, id=id, params=params)
def get_folders(self, library_id, folder_id=None, name=None, deleted=False):
    """
    Get all the folders, or filter specific one(s) via the provided
    ``name`` or ``folder_id``, in the data library with id ``library_id``.
    Provide only one argument: ``name`` or ``folder_id``, but not both.

    If ``name`` is set and multiple names match the given name, all the
    folders matching the argument will be returned.

    :type deleted: bool
    :param deleted: currently accepted but NOT honored: the request below
      is issued without any deleted filter -- TODO confirm intended
      behavior and wire it through

    :rtype: list
    :return: list of JSON formatted dicts, each containing basic
      information about a folder
    """
    if folder_id is not None and name is not None:
        raise ValueError('Provide only one argument between name or folder_id, but not both')
    # NOTE(review): `deleted` is never forwarded to the API call below.
    library_contents = Client._get(self, id=library_id, contents=True)
    if folder_id is not None:
        folder = next((_ for _ in library_contents if _['type'] == 'folder' and _['id'] == folder_id), None)
        folders = [folder] if folder is not None else []
    elif name is not None:
        folders = [_ for _ in library_contents if _['type'] == 'folder' and _['name'] == name]
    else:
        folders = [_ for _ in library_contents if _['type'] == 'folder']
    return folders
def delete_library_dataset(self, library_id, dataset_id, purged=False):
    """
    Delete a library dataset in a data library.

    :type library_id: str
    :param library_id: Encoded ID of the library where the dataset is found

    :type dataset_id: str
    :param dataset_id: Encoded ID of the dataset to be deleted

    :type purged: bool
    :param purged: Indicate that the dataset should be purged (permanently
      deleted)

    :rtype: dict
    :return: A dictionary containing the dataset id and whether the dataset
      has been deleted. For example::

        {u'deleted': True, u'id': u'60e680a037f41974'}
    """
    # Datasets are addressed under the library's contents collection.
    base_url = self.gi._make_url(self, library_id, contents=True)
    return Client._delete(self, url='/'.join([base_url, dataset_id]),
                          payload={'purged': purged})
def update_repository(self, id, tar_ball_path, commit_message=None):
    """
    Update the contents of a Tool Shed repository with specified tar ball.

    :type id: str
    :param id: Encoded repository ID

    :type tar_ball_path: str
    :param tar_ball_path: Path to file containing tar ball to upload.

    :type commit_message: str
    :param commit_message: Commit message used for the underlying Mercurial
      repository backing Tool Shed repository.

    :rtype: dict
    :return: Returns a dictionary that includes repository content warnings.
      Most valid uploads will result in no such warning and an exception
      will be raised generally if there are problems. For example a
      successful upload will look like::

        {u'content_alert': u'', u'message': u''}

    .. versionadded:: 0.5.2
    """
    url = '/'.join([self.gi._make_url(self, id), 'changeset_revision'])
    payload = {'file': attach_file(tar_ball_path)}
    if commit_message is not None:
        payload['commit_message'] = commit_message
    try:
        # Multi-part upload: the open file handle rides in the payload.
        return Client._post(self, id=id, payload=payload,
                            files_attached=True, url=url)
    finally:
        # Always release the file handle, even when the POST fails.
        payload['file'].close()
def show_job(self, job_id):
    """
    Display information on a single job from current user.

    :type job_id: str
    :param job_id: Specific job ID

    :rtype: dict
    :return: A description of the single job, including its state, tool id,
      inputs, outputs and parameters. For example::

        {u'create_time': u'2014-03-01T16:17:29.828624',
         u'exit_code': 0,
         u'id': u'a799d38679e985db',
         u'inputs': {u'input': {u'id': u'ebfb8f50c6abde6d', u'src': u'hda'}},
         u'model_class': u'Job',
         u'outputs': {u'output': {u'id': u'a799d38679e985db', u'src': u'hda'}},
         u'state': u'ok',
         u'tool_id': u'tab2fasta',
         u'update_time': u'2014-03-01T16:17:31.930728'}
    """
    return Client._get(self, id=job_id)
def update_dataset(self, history_id, dataset_id, **kwds):
    """
    Update history dataset metadata. Some of the attributes that can be
    modified are documented below.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_id: str
    :param dataset_id: Id of the dataset

    :type name: str
    :param name: Replace history dataset name with the given string

    :type annotation: str
    :param annotation: Replace history dataset annotation with given string

    :type deleted: bool
    :param deleted: Mark or unmark history dataset as deleted

    :type visible: bool
    :param visible: Mark or unmark history dataset as visible

    :rtype: int
    :return: status code
    """
    # PUT the given attributes to <history contents URL>/<dataset_id>.
    contents_url = self.gi._make_url(self, history_id, contents=True)
    dataset_url = '/'.join([contents_url, dataset_id])
    response = Client._put(self, payload=kwds, url=dataset_url)
    return response.status_code
def __show_item(self, library_id, item_id):
    """
    Get details about a given library item.
    """
    # Items are addressed under the library's contents collection.
    base = self.gi._make_url(self, library_id, contents=True)
    return Client._get(self, url='/'.join([base, item_id]))
def create_local_user(self, username, user_email, password):
    """
    Create a new Galaxy user.

    .. note::
      For this method to work, the Galaxy instance must have the
      ``allow_user_creation`` option set to ``True`` and ``use_remote_user``
      option set to ``False`` in the ``config/galaxy.ini`` configuration
      file.

    :type username: str
    :param username: Username of user to be created

    :type user_email: str
    :param user_email: Email of user to be created

    :type password: str
    :param password: password of user to be created

    :rtype: dict
    :return: dictionary containing information about the user
    """
    payload = {
        'username': username,
        'email': user_email,
        'password': password,
    }
    return Client._post(self, payload)
def get_current_user(self):
    """
    Display information about the user associated with this Galaxy
    connection.

    :rtype: dict
    :return: a dictionary containing information about the current user
    """
    # The 'current' endpoint lives directly under the base URL for this
    # client (no specific id).
    url = self.gi._make_url(self, None)
    url = '/'.join([url, 'current'])
    return Client._get(self, url=url)
def get_jobs(self):
    """
    Get a list of jobs for the current user.

    :rtype: list
    :return: list of dictionaries containing summary job information.
      For example::

        [{u'create_time': u'2014-03-01T16:16:48.640550',
          u'exit_code': 0,
          u'id': u'ebfb8f50c6abde6d',
          u'model_class': u'Job',
          u'state': u'ok',
          u'tool_id': u'fasta2tab',
          u'update_time': u'2014-03-01T16:16:50.657399'},
         {u'create_time': u'2014-03-01T16:05:34.851246',
          u'exit_code': 0,
          u'id': u'1cd8e2f6b131e891',
          u'model_class': u'Job',
          u'state': u'ok',
          u'tool_id': u'upload1',
          u'update_time': u'2014-03-01T16:05:39.558458'}]
    """
    # No filtering is supported by this method: the previous docstring
    # described state/tool_id/history_id parameters that the signature
    # does not accept, so those false docs have been removed.
    return Client._get(self)
def create_folder(self, library_id, folder_name, description=None, base_folder_id=None):
    """
    Create a folder in a library.

    :type library_id: str
    :param library_id: library id to use

    :type folder_name: str
    :param folder_name: name of the new folder in the data library

    :type description: str
    :param description: description of the new folder in the data library

    :type base_folder_id: str
    :param base_folder_id: id of the folder where to create the new folder.
      If not provided, the root folder will be used
    """
    # Default to the library's root folder when no parent is given.
    parent_id = base_folder_id if base_folder_id is not None else self._get_root_folder_id(library_id)
    payload = {
        'name': folder_name,
        'folder_id': parent_id,
        'create_type': 'folder',
    }
    if description is not None:
        payload['description'] = description
    return Client._post(self, payload, id=library_id, contents=True)
def show_repository(self, toolShed_id):
    """
    Display information of a repository from Tool Shed.

    :type toolShed_id: str
    :param toolShed_id: Encoded Tool Shed ID

    :rtype: dict
    :return: Information about the tool. For example::

        {u'category_ids': [u'c1df3132f6334b0e', u'f6d7b0037d901d9b'],
         u'deleted': False,
         u'deprecated': False,
         u'description': u'Order Contigs',
         u'homepage_url': u'',
         u'id': u'287bd69f724b99ce',
         u'long_description': u'',
         u'name': u'best_tool_ever',
         u'owner': u'billybob',
         u'private': False,
         u'remote_repository_url': u'',
         u'times_downloaded': 0,
         u'type': u'unrestricted',
         u'url': u'/api/repositories/287bd69f724b99ce',
         u'user_id': u'5cefd48bc04af6d4'}

    .. versionchanged:: 0.4.1
      Changed method name from ``show_tool`` to ``show_repository`` to
      better align with the Tool Shed concepts.
    """
    return Client._get(self, id=toolShed_id)
def copy_from_dataset(self, library_id, dataset_id, folder_id=None, message=''):
    """
    Copy a Galaxy dataset into a library.

    :type library_id: str
    :param library_id: id of the library where to place the uploaded file

    :type dataset_id: str
    :param dataset_id: id of the dataset to copy from

    :type folder_id: str
    :param folder_id: id of the folder where to place the uploaded files.
      If not provided, the root folder will be used

    :type message: str
    :param message: message for copying action
    """
    # Fall back to the library's root folder when none is specified.
    target_folder = folder_id if folder_id is not None else self._get_root_folder_id(library_id)
    payload = {
        'folder_id': target_folder,
        'create_type': 'file',
        'from_hda_id': dataset_id,
        'ldda_message': message,
    }
    return Client._post(self, payload, id=library_id, contents=True)
def search_jobs(self, job_info):
    """
    Return jobs for the current user based on the payload content.

    This method is designed to scan the list of previously run jobs and
    find records of jobs that had the exact same input parameters and
    datasets. This can be used to minimize the amount of repeated work,
    and simply recycle the old results.

    :type job_info: dict
    :param job_info: Dictionary containing description of requested job.
      This is in the same format as a request to POST /api/tools would
      take to initiate a job

    :rtype: list
    :returns: list of dictionaries containing summary job information of
      the jobs that match the requested job run
    """
    search_url = '/'.join([self.gi._make_url(self), "search"])
    return Client._post(self, url=search_url, payload=job_info)
def get_current_history(self):
    """
    Returns the current user's most recently used history object (not
    deleted).
    """
    # The endpoint lives directly under the base URL (no specific id).
    base = self.gi._make_url(self, None)
    return Client._get(self, url='/'.join([base, 'most_recently_used']))
def get_libraries(self, library_id=None, name=None, deleted=False):
    """
    Get all the libraries, or filter for specific one(s) via the provided
    name or ID. Provide only one argument: ``name`` or ``library_id``, but
    not both.

    :type library_id: str
    :param library_id: filter for library by library id

    :type name: str
    :param name: If ``name`` is set and multiple names match the given
      name, all the libraries matching the argument will be returned

    :type deleted: bool
    :param deleted: If set to ``True``, return libraries that have been
      deleted

    :rtype: list
    :return: list of dicts each containing basic information about a
      library
    """
    if library_id is not None and name is not None:
        raise ValueError('Provide only one argument between name or library_id, but not both')
    libraries = Client._get(self, deleted=deleted)
    if library_id is not None:
        # Keep at most the first library matching the requested ID.
        libraries = [lib for lib in libraries if lib['id'] == library_id][:1]
    if name is not None:
        libraries = [lib for lib in libraries if lib['name'] == name]
    return libraries
def create_library(self, name, description=None, synopsis=None):
    """
    Create a data library with the properties defined in the arguments.

    :type name: str
    :param name: Name of the new data library

    :type description: str
    :param description: Optional data library description

    :type synopsis: str
    :param synopsis: Optional data library synopsis

    :rtype: dict
    :return: details of the created library. For example::

        {"id": "f740ab636b360a70",
         "name": "Library from bioblend",
         "url": "/api/libraries/f740ab636b360a70"}
    """
    payload = {'name': name}
    # Optional fields are only sent when provided (and non-empty).
    if description:
        payload['description'] = description
    if synopsis:
        payload['synopsis'] = synopsis
    return Client._post(self, payload)
def show_job(self, job_id, full_details=False):
    """
    Display information on a single job from current user.

    :type job_id: str
    :param job_id: Specific job ID

    :type full_details: bool
    :param full_details: when ``True``, request the complete list of
      details for the given job

    :rtype: dict
    :return: A description of the single job, including its state, tool id,
      inputs, outputs and parameters. For example::

        {u'create_time': u'2014-03-01T16:17:29.828624',
         u'exit_code': 0,
         u'id': u'a799d38679e985db',
         u'inputs': {u'input': {u'id': u'ebfb8f50c6abde6d', u'src': u'hda'}},
         u'model_class': u'Job',
         u'outputs': {u'output': {u'id': u'a799d38679e985db', u'src': u'hda'}},
         u'state': u'ok',
         u'tool_id': u'tab2fasta',
         u'update_time': u'2014-03-01T16:17:31.930728'}
    """
    # The 'full' parameter is only sent when full details were requested.
    params = {'full': full_details} if full_details else {}
    return Client._get(self, id=job_id, params=params)
def show_dataset_collection(self, history_id, dataset_collection_id):
    """
    Get details about a given history dataset collection.
    """
    # Collections are nested under the history's contents URL.
    base = self.gi._make_url(self, history_id, contents=True)
    target = '/'.join([base, "dataset_collections", dataset_collection_id])
    return Client._get(self, url=target)
def set_library_permissions(self, library_id, access_in=None, modify_in=None, add_in=None, manage_in=None):
    """
    Set the permissions for a library.

    Note: this will override all security for this library even if you
    leave out a permission type.

    :type library_id: str
    :param library_id: id of the library

    :type access_in: list
    :param access_in: list of role ids

    :type modify_in: list
    :param modify_in: list of role ids

    :type add_in: list
    :param add_in: list of role ids

    :type manage_in: list
    :param manage_in: list of role ids
    """
    # Include only the permission types that were actually supplied.
    payload = {}
    for key, roles in (('LIBRARY_ACCESS_in', access_in),
                       ('LIBRARY_MODIFY_in', modify_in),
                       ('LIBRARY_ADD_in', add_in),
                       ('LIBRARY_MANAGE_in', manage_in)):
        if roles:
            payload[key] = roles
    url = '/'.join([self.gi._make_url(self, library_id), 'permissions'])
    return Client._post(self, payload, url=url)
def update_repository_for(ctx, tsi, id, repo_config):
    """Push metadata from ``repo_config`` to the existing Tool Shed
    repository identified by ``id`` and return the updated repository."""
    # TODO: enforce no "type" change.
    from bioblend.galaxy.client import Client
    description = repo_config.get("description", None)
    long_description = repo_config.get("long_description", None)
    remote_repository_url = repo_config.get("remote_repository_url", None)
    homepage_url = repo_config.get("homepage_url", None)
    category_ids = find_category_ids(tsi, repo_config.get("categories", []))
    _ensure_shed_description(description)
    kwds = {
        "name": repo_config["name"],
        "synopsis": description,
    }
    # Optional fields are only sent when present in the config.
    if long_description is not None:
        kwds["description"] = long_description
    if remote_repository_url is not None:
        kwds["remote_repository_url"] = remote_repository_url
    if homepage_url is not None:
        kwds["homepage_url"] = homepage_url
    if category_ids is not None:
        kwds['category_ids[]'] = category_ids
    return Client._put(tsi.repositories, id=id, payload=kwds)
def update_history(self, history_id, name=None, annotation=None, **kwds):
    """
    Update history metadata information. Some of the attributes that can be
    modified are documented below.

    :type history_id: str
    :param history_id: Encoded history ID

    :type name: str
    :param name: Replace history name with the given string

    :type annotation: str
    :param annotation: Replace history annotation with given string

    :type deleted: bool
    :param deleted: Mark or unmark history as deleted

    :type published: bool
    :param published: Mark or unmark history as published

    :type importable: bool
    :param importable: Mark or unmark history as importable

    :type tags: list
    :param tags: Replace history tags with the given list

    :rtype: int
    :return: status code
    """
    # Only send name/annotation when explicitly provided, so that calls
    # updating other attributes do not overwrite existing values with None.
    if name is not None:
        kwds['name'] = name
    if annotation is not None:
        kwds['annotation'] = annotation
    return Client._put(self, kwds, id=history_id).status_code
def show_data_table(self, data_table_id):
    """
    Display information on a single data_table.

    :type data_table_id: str
    :param data_table_id: ID of the data table

    :rtype: dict
    :return: A description of data_table and its content. For example::

        {"columns": ["value", "dbkey", "name", "path"],
         "fields": [["test id",
                     "test",
                     "test name",
                     "/opt/galaxy-dist/tool-data/test/seq/test id.fa"]],
         "model_class": "TabularToolDataTable",
         "name": "all_fasta"}
    """
    return Client._get(self, id=data_table_id)
def update_dataset_collection(self, history_id, dataset_collection_id, **kwds):
    """
    Update history dataset collection metadata. Some of the attributes that
    can be modified are documented below.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_collection_id: str
    :param dataset_collection_id: Encoded dataset_collection ID

    :type name: str
    :param name: Replace history dataset collection name with the given
      string

    :type deleted: bool
    :param deleted: Mark or unmark history dataset collection as deleted

    :type visible: bool
    :param visible: Mark or unmark history dataset collection as visible

    :rtype: int
    :return: status code
    """
    # PUT the given attributes to the collection's URL under the history.
    base = self.gi._make_url(self, history_id, contents=True)
    collection_url = '/'.join([base, "dataset_collections", dataset_collection_id])
    return Client._put(self, payload=kwds, url=collection_url).status_code
def create_history_tag(self, history_id, tag):
    """
    Create history tag.

    :type history_id: str
    :param history_id: Encoded history ID

    :type tag: str
    :param tag: Add tag to history

    :rtype: dict
    :return: A dictionary with information regarding the tag.
      For example::

        {'id': 'f792763bee8d277a',
         'model_class': 'HistoryTagAssociation',
         'user_tname': 'NGS_PE_RUN',
         'user_value': None}
    """
    # The tag travels in the URL itself, so the POST body stays empty.
    tag_url = '/'.join([self.url, history_id, 'tags', tag])
    return Client._post(self, {}, url=tag_url)
def get_histories(self, history_id=None, name=None, deleted=False):
    """
    Get all histories, or filter the specific one(s) via the provided
    ``name`` or ``history_id``. Provide only one argument, ``name`` or
    ``history_id``, but not both.

    If ``deleted`` is set to ``True``, return histories that have been
    deleted.

    :type history_id: str
    :param history_id: Encoded history ID to filter on

    :type name: str
    :param name: Name of history to filter on

    :rtype: list
    :return: Return a list of history element dicts. If more than one
      history matches the given ``name``, return the list of all the
      histories with the given name
    """
    if history_id is not None and name is not None:
        raise ValueError('Provide only one argument between name or history_id, but not both')
    histories = Client._get(self, deleted=deleted)
    if history_id is not None:
        # Keep at most the first history matching the requested ID.
        histories = [h for h in histories if h['id'] == history_id][:1]
    elif name is not None:
        histories = [h for h in histories if h['name'] == name]
    return histories
def get_current_user(self):
    """
    Display information about the user associated with this Galaxy
    connection.

    :rtype: dict
    :return: a dictionary containing information about the current user
    """
    # The 'current' endpoint lives directly under the base URL for this
    # client (no specific id).
    base = self.gi._make_url(self, None)
    return Client._get(self, url='/'.join([base, 'current']))
def export_workflow_json(self, workflow_id):
    """
    Exports a workflow in json format.

    :type workflow_id: str
    :param workflow_id: Encoded workflow ID
    """
    # GET from <workflows URL>/download/<workflow_id>.
    base = self.gi._make_url(self)
    return Client._get(self, url='/'.join([base, "download", workflow_id]))
def import_workflow_json(self, workflow_json):
    """
    Imports a new workflow given a json representation of a previously
    exported workflow.
    """
    upload_url = '/'.join([self.gi._make_url(self), "upload"])
    return Client._post(self, url=upload_url, payload={'workflow': workflow_json})
def get_ftp_files(self, deleted=False):
    """
    Get a list of local files.

    :type deleted: bool
    :param deleted: currently accepted but ignored: no filter is applied
      to the request

    :rtype: list
    :return: A list of dicts with details on individual files on ftp
    """
    # NOTE(review): `deleted` is never forwarded to the API call below --
    # confirm whether the endpoint supports such a filter before wiring
    # it through.
    return Client._get(self)
def show_category(self, category_id):
    """
    Get details of a given category.

    :type category_id: str
    :param category_id: Encoded category ID

    :rtype: dict
    :return: details of the given category
    """
    # Simple GET by encoded id.
    return Client._get(self, id=category_id)
def undelete_history(self, history_id):
    """
    Undelete a history.

    :type history_id: str
    :param history_id: Encoded history ID
    """
    # POST to <deleted history URL>/undelete with an empty body.
    base = self.gi._make_url(self, history_id, deleted=True)
    return Client._post(self, payload={}, url='/'.join([base, 'undelete']))
def upload_dataset_from_library(self, history_id, lib_dataset_id):
    """
    Upload a dataset into the history from a library. Requires the library
    dataset ID, which can be obtained from the library contents.

    :type history_id: str
    :param history_id: Encoded history ID

    :type lib_dataset_id: str
    :param lib_dataset_id: Encoded library dataset ID
    """
    payload = {
        'content': lib_dataset_id,
        'source': 'library',
        'from_ld_id': lib_dataset_id,  # compatibility with old API
    }
    return Client._post(self, payload, id=history_id, contents=True)
def delete_workflow(self, workflow_id):
    """
    Delete a workflow identified by ``workflow_id``.

    .. warning::
      Deleting a workflow is irreversible - all workflow data will be
      permanently deleted.

    :type workflow_id: str
    :param workflow_id: Encoded workflow ID
    """
    # Simple DELETE by encoded id.
    return Client._delete(self, id=workflow_id)
def get_data_tables(self):
    """
    Get the list of all data tables.

    :rtype: list
    :return: A list of dicts with details on individual data tables.
      For example::

        [{"model_class": "TabularToolDataTable", "name": "fasta_indexes"},
         {"model_class": "TabularToolDataTable", "name": "bwa_indexes"}]
    """
    # Plain GET on the client's base URL lists every table.
    return Client._get(self)
def delete_history(self, history_id, purge=False):
    """
    Delete a history.

    :type history_id: str
    :param history_id: Encoded history ID

    :type purge: bool
    :param purge: if ``True``, also purge the history. Note that for the
      purge option to work, the ``allow_user_dataset_purge`` option must be
      set in the Galaxy's configuration file ``universe_wsgi.ini``
    """
    # The purge flag is only sent when it is literally True, matching the
    # original strict identity check.
    payload = {'purge': purge} if purge is True else {}
    return Client._delete(self, payload, id=history_id)
def show_folder(self, folder_id):
    """
    Display information about a folder.

    :type folder_id: str
    :param folder_id: the folder's encoded id, prefixed by 'F'

    :rtype: dict
    :return: dictionary including details of the folder
    """
    # Simple GET by encoded folder id.
    return Client._get(self, id=folder_id)
def get_library_permissions(self, library_id):
    """
    Get the permissions for a library.

    :type library_id: str
    :param library_id: id of the library

    :rtype: dict
    :return: dictionary with all applicable permissions' values
    """
    permissions_url = '/'.join([self.gi._make_url(self, library_id), 'permissions'])
    return Client._get(self, url=permissions_url)
def create_form(self, form_xml_text):
    """
    Create a new form.

    :type form_xml_text: str
    :param form_xml_text: Form xml to create a form on galaxy instance

    :rtype: str
    :returns: Unique url of newly created form with encoded id
    """
    # The form XML is posted directly as the request payload.
    return Client._post(self, payload=form_xml_text)
def get_group_roles(self, group_id):
    """
    Get the list of roles associated to the given group.

    :type group_id: str
    :param group_id: Encoded group ID

    :rtype: list of dicts
    :return: List of group roles' info
    """
    roles_url = '/'.join([self.gi._make_url(self, group_id), 'roles'])
    return Client._get(self, url=roles_url)
def delete_group_user(self, group_id, user_id):
    """
    Remove a user from the given group.

    :type group_id: str
    :param group_id: Encoded group ID

    :type user_id: str
    :param user_id: Encoded user ID to remove from the group
    """
    # DELETE <group URL>/users/<user_id> with an empty body.
    member_url = '/'.join([self.gi._make_url(self, group_id), 'users', user_id])
    return Client._delete(self, {}, url=member_url)
def delete_group_role(self, group_id, role_id):
    """
    Remove a role from the given group.

    :type group_id: str
    :param group_id: Encoded group ID

    :type role_id: str
    :param role_id: Encoded role ID to remove from the group
    """
    # DELETE <group URL>/roles/<role_id> with an empty body.
    member_url = '/'.join([self.gi._make_url(self, group_id), 'roles', role_id])
    return Client._delete(self, {}, url=member_url)
def cancel_invocation(self, workflow_id, invocation_id):
    """
    Cancel the scheduling of a workflow.

    :type workflow_id: str
    :param workflow_id: Encoded workflow ID

    :type invocation_id: str
    :param invocation_id: Encoded workflow invocation ID
    """
    # A DELETE on the invocation URL cancels the invocation.
    invocation_url = self._invocation_url(workflow_id, invocation_id)
    return Client._delete(self, url=invocation_url)
def create_collection_func(element_identifiers, collection_type):
    """Create a dataset collection of the given type in the enclosing
    history (``history_id`` and ``user_gi`` come from the enclosing
    scope)."""
    payload = {
        "name": "dataset collection",
        "instance_type": "history",
        "history_id": history_id,
        "element_identifiers": element_identifiers,
        "collection_type": collection_type,
        # 'record' collections request field auto-detection; all other
        # types send None.
        "fields": "auto" if collection_type == "record" else None,
    }
    collections_url = user_gi.url + "/dataset_collections"
    return Client._post(user_gi.histories, payload, url=collections_url)
def search_tools(self, q, page=1, page_size=10):
    """
    Search for tools in a Galaxy Tool Shed.

    :type q: str
    :param q: query string for searching purposes

    :type page: int
    :param page: page requested

    :type page_size: int
    :param page_size: page size requested

    :rtype: dict
    :return: dictionary containing search hits as well as metadata for the
      search. For example::

        {u'hits': [{u'matched_terms': [],
                    u'score': 3.0,
                    u'tool': {u'description': u'convert between various FASTQ quality formats',
                              u'id': u'69819b84d55f521efda001e0926e7233',
                              u'name': u'FASTQ Groomer',
                              u'repo_name': None,
                              u'repo_owner_username': u'devteam'}}],
         u'hostname': u'https://testtoolshed.g2.bx.psu.edu/',
         u'page': u'1',
         u'page_size': u'2',
         u'total_results': u'118'}
    """
    search_params = {'q': q, 'page': page, 'page_size': page_size}
    return Client._get(self, params=search_params)
def export_history(self, history_id, gzip=True, include_hidden=False, include_deleted=False, wait=False):
    """
    Start a job to create an export archive for the given history.

    :type history_id: str
    :param history_id: history ID

    :type gzip: bool
    :param gzip: create .tar.gz archive if ``True``, else .tar

    :type include_hidden: bool
    :param include_hidden: whether to include hidden datasets in the export

    :type include_deleted: bool
    :param include_deleted: whether to include deleted datasets in the
      export

    :type wait: bool
    :param wait: if ``True``, block until the export is ready; else, return
      immediately

    :rtype: str
    :return: ``jeha_id`` of the export, or empty if ``wait`` is ``False``
      and the export is not ready.
    """
    params = {
        'gzip': gzip,
        'include_hidden': include_hidden,
        'include_deleted': include_deleted,
    }
    url = '%s/exports' % self.gi._make_url(self, history_id)
    # Keep issuing the PUT until the export archive is ready: the server
    # signals "not ready yet" with a 202 status code, which surfaces here
    # as a ConnectionError carrying that status.
    while True:
        try:
            r = Client._put(self, {}, url=url, params=params)
        except ConnectionError as e:
            if e.status_code == 202:  # export is not ready
                if wait:
                    # Poll roughly once per second until the export is done.
                    time.sleep(1)
                else:
                    return ''
            else:
                # Any other error is unexpected -- propagate it.
                raise
        else:
            # PUT succeeded: the export is ready.
            break
    # The jeha_id is the last path component of the download URL.
    jeha_id = r['download_url'].rsplit('/', 1)[-1]
    return jeha_id
def _tool_post(self, payload, files_attached=False):
    """
    POST the given payload to the tools endpoint.

    When ``files_attached`` is ``True`` the request is sent as multi-part
    form data, so each complex parameter must be individually JSON-encoded
    for Galaxy to decode it; otherwise the whole payload travels as
    application/json and is dumped/loaded all at once by requests and
    Galaxy.
    """
    if files_attached:
        # Only 'inputs' currently needs this per-key JSON encoding.
        for key in ("inputs",):
            if key in payload:
                payload[key] = dumps(payload[key])
    return Client._post(self, payload, files_attached=files_attached)
def import_workflow(path, admin_gi, user_gi, from_path=False):
    """Import a workflow path to specified Galaxy instance."""
    if from_path:
        # TODO: Update bioblend to allow from_path.
        workflows_url = user_gi._make_url(user_gi.workflows)
        return Client._post(user_gi.workflows, dict(from_path=path), url=workflows_url)
    # Default path: materialize the workflow dict and import it through
    # the importer interface.
    importer = BioBlendImporterGalaxyInterface(admin_gi=admin_gi, user_gi=user_gi)
    workflow = _raw_dict(path, importer)
    return importer.import_workflow(workflow)
def export_workflow_json(self, workflow_id):
    """
    Exports a workflow.

    :type workflow_id: str
    :param workflow_id: Encoded workflow ID

    :rtype: dict
    :return: Dict representing the workflow requested
    """
    download_url = _join(self.gi._make_url(self), "download", workflow_id)
    return Client._get(self, url=download_url)
def import_workflow_json(self, workflow_json):
    """
    Imports a new workflow given a json representation of a previously
    exported workflow.

    :type workflow_json: str
    :param workflow_json: JSON string representing the workflow to be
      imported
    """
    upload_url = _join(self.gi._make_url(self), "upload")
    return Client._post(self, url=upload_url, payload={'workflow': workflow_json})
def delete_library(self, library_id):
    """
    Delete a data library.

    .. warning::
      Deleting a data library is irreversible - all of the data from the
      library will be permanently deleted.

    :type library_id: str
    :param library_id: Encoded data library ID identifying the library to
      be deleted
    """
    # DELETE with an empty body on the library's URL.
    return Client._delete(self, {}, id=library_id)
def delete_data_table(self, data_table_id, values):
    """
    Delete an item from a data table.

    :type data_table_id: str
    :param data_table_id: ID of the data table

    :type values: str
    :param values: a "|" separated list of column contents, there must be
      a value for all the columns of the data table
    """
    return Client._delete(self, payload={'values': values}, id=data_table_id)
def show_dataset_collection(self, history_id, dataset_collection_id):
    """
    Get details about a given history dataset collection.

    :type history_id: str
    :param history_id: Encoded history ID

    :type dataset_collection_id: str
    :param dataset_collection_id: Encoded dataset collection ID
    """
    # Collections are nested under the history's contents URL.
    base = self.gi._make_url(self, history_id, contents=True)
    collection_url = '/'.join([base, "dataset_collections", dataset_collection_id])
    return Client._get(self, url=collection_url)
def search_repositories(self, q, page=1, page_size=10):
    """
    Search for repositories in a Galaxy Tool Shed.

    :type q: str
    :param q: query string for searching purposes

    :type page: int
    :param page: page requested

    :type page_size: int
    :param page_size: page size requested

    :rtype: dict
    :return: dictionary containing search hits as well as metadata for the
      search. For example::

        {u'hits': [{u'matched_terms': [],
                    u'repository': {u'approved': u'no',
                                    u'description': u'Convert export file to fastq',
                                    u'full_last_updated': u'2015-01-18 09:48 AM',
                                    u'homepage_url': u'',
                                    u'id': u'bdfa208f0cf6504e',
                                    u'last_updated': u'less than a year',
                                    u'long_description': u'This is a simple too to convert Solexas Export files to FASTQ files.',
                                    u'name': u'export_to_fastq',
                                    u'remote_repository_url': u'',
                                    u'repo_owner_username': u'louise',
                                    u'times_downloaded': 164},
                    u'score': 4.92}],
         u'hostname': u'https://testtoolshed.g2.bx.psu.edu/',
         u'page': u'1',
         u'page_size': u'2',
         u'total_results': u'64'}
    """
    search_params = dict(q=q, page=page, page_size=page_size)
    return Client._get(self, params=search_params)
def show_genome(self, id, num=None, chrom=None, low=None, high=None):
    """
    Returns information about build <id>

    :type id: str
    :param id: Genome build ID to use
    """
    # Only include the optional filters that were actually supplied.
    params = {}
    if num:
        params['num'] = num
    if chrom:
        params['chrom'] = chrom
    if low:
        params['low'] = low
    if high:
        params['high'] = high
    # Pass arguments by keyword: Client._get has several optional
    # parameters, so a bare positional `params` could bind to the wrong
    # one (every other call site in this file uses keywords).
    return Client._get(self, id=id, params=params)
def create_history(self, name=None):
    """
    Create a new history, optionally setting the ``name``.

    :type name: str
    :param name: Optional name for new history

    :rtype: dict
    :return: Dictionary containing information about newly created history
    """
    # The name is only sent when one was provided.
    payload = {} if name is None else {'name': name}
    return Client._post(self, payload)