def show_history(self, history_id, contents=False):
    """
    Get details of a given history. By default, just get the history meta
    information. If ``contents`` is set to ``True``, get the complete list of
    datasets in the given history.
    """
    return Client._get(self, id=history_id, contents=contents)
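# Usage sketch (not part of the client code): assuming ``hc`` is an instance of
# this history client; the encoded history ID below is hypothetical.
#
#   hc.show_history('b5731bb7b34e1930')                  # history metadata only
#   hc.show_history('b5731bb7b34e1930', contents=True)   # full list of datasets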
def _do_upload(self, **keywords):
    """
    Set up the POST request and do the actual data upload to a data library.

    This method should not be called directly; instead, use the methods
    specific to the desired type of data upload.
    """
    # If folder_id was not provided in the arguments, find the root folder ID
    if keywords.get("folder_id", None) is None:
        folders = self.show_library(library_id=keywords["library_id"], contents=True)
        for f in folders:
            if f["name"] == "/":
                folder_id = f["id"]
                break
    else:
        folder_id = keywords["folder_id"]
    # Compose the payload dict
    payload = {}
    payload["folder_id"] = folder_id
    payload["file_type"] = keywords.get("file_type", "auto")
    payload["dbkey"] = keywords.get("dbkey", "?")
    payload["create_type"] = "file"
    if keywords.get("file_url", None) is not None:
        payload["upload_option"] = "upload_file"
        payload["files_0|url_paste"] = keywords["file_url"]
    elif keywords.get("pasted_content", None) is not None:
        payload["upload_option"] = "upload_file"
        payload["files_0|url_paste"] = keywords["pasted_content"]
    elif keywords.get("server_dir", None) is not None:
        payload["upload_option"] = "upload_directory"
        payload["server_dir"] = keywords["server_dir"]
    return Client._post(self, payload, id=keywords["library_id"], contents=True)
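# Sketch of how a public upload method might delegate to ``_do_upload``. The
# wrapper name and its defaults are illustrative, not necessarily the ones
# defined elsewhere in this client:
#
#   def upload_file_from_url(self, library_id, file_url, folder_id=None,
#                            file_type='auto', dbkey='?'):
#       return self._do_upload(library_id=library_id, file_url=file_url,
#                              folder_id=folder_id, file_type=file_type,
#                              dbkey=dbkey)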
def undelete_history(self, history_id):
    """
    Undelete a history
    """
    url = self.gi._make_url(self, history_id, deleted=True)
    # Append the 'undelete' action to the history URL
    url = '/'.join([url, 'undelete'])
    return Client._post(self, payload={}, url=url)
def upload_dataset_from_library(self, history_id, lib_dataset_id):
    """
    Upload a dataset into the history from a library. Requires the library
    dataset ID, which can be obtained from the library contents.
    """
    payload = {'from_ld_id': lib_dataset_id}
    return Client._post(self, payload, id=history_id, contents=True)
def create_history(self, name=None):
    """
    Create a new history, optionally setting the ``name``.
    """
    payload = {}
    if name is not None:
        payload['name'] = name
    return Client._post(self, payload)
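# Usage sketch: create a history and copy a library dataset into it. Assumes
# the create call returns the new history's details, including its encoded
# ``id``; instance names and IDs are hypothetical.
#
#   new_hist = hc.create_history(name='RNA-seq run 1')
#   hc.upload_dataset_from_library(new_hist['id'], '33b43b4e7093c91f')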
def show_dataset(self, history_id, dataset_id):
    """
    Get details about a given history dataset. The required ``history_id``
    can be obtained from the dataset's history content details.
    """
    url = self.gi._make_url(self, history_id, contents=True)
    # Append the dataset_id to the base history contents URL
    url = '/'.join([url, dataset_id])
    return Client._get(self, url=url)
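# Usage sketch: list a history's datasets, then fetch details of one of them.
# Assumes each entry returned with ``contents=True`` carries an encoded ``id``
# key, as used elsewhere in this client.
#
#   datasets = hc.show_history('b5731bb7b34e1930', contents=True)
#   first = hc.show_dataset('b5731bb7b34e1930', datasets[0]['id'])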
def show_library(self, library_id, contents=False):
    """
    Get information about a library. If you want to get the contents of the
    library (rather than just the library details), set ``contents`` to
    ``True``.

    Return a list of JSON formatted dicts containing library details.
    """
    return Client._get(self, id=library_id, contents=contents)
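# Usage sketch: the contents listing is what the upload and folder-creation
# helpers in this client iterate over to find the root folder, i.e. the entry
# whose ``name`` is ``"/"``. The library ID is hypothetical.
#
#   contents = lc.show_library('f740ab636b360a70', contents=True)
#   root_id = [f['id'] for f in contents if f['name'] == '/'][0]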
def delete_history(self, history_id, purge=False):
    """
    Delete a history. If ``purge`` is set to ``True``, also purge the history.

    Note that for the purge option to work, the ``allow_user_dataset_purge``
    option must be set in Galaxy's ``universe_wsgi.ini`` configuration file.
    """
    payload = {}
    if purge is True:
        payload['purge'] = purge
    return Client._delete(self, payload, id=history_id)
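# Usage sketch: delete a history, then restore it; purging requires the
# ``allow_user_dataset_purge`` option noted above.
#
#   hc.delete_history('b5731bb7b34e1930')              # mark as deleted
#   hc.undelete_history('b5731bb7b34e1930')            # restore it
#   hc.delete_history('b5731bb7b34e1930', purge=True)  # delete and purge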
def run_workflow(self, workflow_id, dataset_map, history_id=None,
                 history_name=None, import_inputs_to_history=False):
    """
    Run the workflow identified by ``workflow_id``

    :type workflow_id: string
    :param workflow_id: Encoded workflow ID

    :type dataset_map: string or dict
    :param dataset_map: A mapping of workflow inputs to datasets. The datasets
                        source can be a LibraryDatasetDatasetAssociation (``ldda``),
                        LibraryDataset (``ld``), or HistoryDatasetAssociation (``hda``).
                        The map must be in the following format:
                        ``{'<input>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda]'}}``
                        (eg, ``{'23': {'id': '29beef4fadeed09f', 'src': 'ld'}}``)

    :type history_id: string
    :param history_id: The encoded history ID where to store the workflow output.
                       ``history_id`` OR ``history_name`` should be provided but
                       not both!

    :type history_name: string
    :param history_name: Create a new history with the given name to store the
                         workflow output. ``history_id`` OR ``history_name``
                         should be provided but not both!

    :type import_inputs_to_history: bool
    :param import_inputs_to_history: If ``True``, used workflow inputs will be
                                     imported into the history. If ``False``,
                                     only workflow outputs will be visible in
                                     the given history.

    :rtype: dict
    :return: A dict containing the history ID where the outputs are placed as
             well as output dataset IDs. For example::

               {u'history': u'64177123325c9cfd',
                u'outputs': [u'aa4d3084af404259']}
    """
    payload = {}
    payload['workflow_id'] = workflow_id
    payload['ds_map'] = dataset_map
    if history_id:
        payload['history'] = 'hist_id={0}'.format(history_id)
    elif history_name:
        payload['history'] = history_name
    else:
        print "Must provide history_id or history_name argument"
    if import_inputs_to_history is False:
        payload['no_add_to_history'] = True
    return Client._post(self, payload)
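# Usage sketch: map a workflow's declared input (see ``show_workflow`` below)
# to a library dataset and run it into a new history. IDs are hypothetical.
#
#   wf = wc.show_workflow('92c56938c2f9b315')
#   input_step = wf['inputs'].keys()[0]          # e.g. u'23'
#   ds_map = {input_step: {'id': '29beef4fadeed09f', 'src': 'ld'}}
#   result = wc.run_workflow('92c56938c2f9b315', ds_map,
#                            history_name='Workflow output')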
def get_workflows(self):
    """
    Get a list of all workflows

    :rtype: list
    :return: A list of workflow dicts. For example::

      [{u'id': u'92c56938c2f9b315',
        u'name': u'Simple',
        u'url': u'/api/workflows/92c56938c2f9b315'}]
    """
    return Client._get(self)
def get_users(self, deleted=False):
    """
    Get a list of all registered users. If ``deleted`` is set to ``True``,
    get a list of deleted users.

    :rtype: list
    :return: A list of dicts with user details. For example::

      [{u'email': u'*****@*****.**',
        u'id': u'dda47097d9189f15',
        u'url': u'/api/users/dda47097d9189f15'}]
    """
    return Client._get(self, deleted=deleted)
def create_user(self, user_email):
    """
    Create a new Galaxy user.

    .. note::
        For this method to work, the Galaxy instance must have the
        ``allow_user_creation`` and ``use_remote_user`` options set to ``True``
        in the ``universe_wsgi.ini`` configuration file. Also note that setting
        ``use_remote_user`` requires an upstream authentication proxy server;
        without one, access to Galaxy via a browser will not be possible.
    """
    payload = {}
    payload["remote_user_email"] = user_email
    return Client._post(self, payload)
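# Usage sketch: list registered users, inspect one, and create a new account
# (only works with the configuration noted above). The client instance ``uc``
# and the email address are hypothetical.
#
#   users = uc.get_users()
#   details = uc.show_user(users[0]['id'])
#   uc.create_user('new.user@example.org')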
def create_library(self, name, description=None, synopsis=None):
    """
    Create a data library with the properties defined in the arguments.

    Return a list of JSON dicts, looking like so::

      [{"id": "f740ab636b360a70",
        "name": "Library from blend",
        "url": "/api/libraries/f740ab636b360a70"}]
    """
    payload = {"name": name}
    if description:
        payload["description"] = description
    if synopsis:
        payload["synopsis"] = synopsis
    return Client._post(self, payload)
def show_workflow(self, workflow_id):
    """
    Display information needed to run a workflow

    :type workflow_id: string
    :param workflow_id: Encoded workflow ID

    :rtype: dict
    :return: A description of the workflow and its inputs as a JSON object.
             For example::

               {u'id': u'92c56938c2f9b315',
                u'inputs': {u'23': {u'label': u'Input Dataset', u'value': u''}},
                u'name': u'Simple',
                u'url': u'/api/workflows/92c56938c2f9b315'}
    """
    return Client._get(self, id=workflow_id)
def create_folder(self, library_id, folder_name, description=None, base_folder_id=None):
    """
    Create a folder in the given library, under the given base folder. If
    ``base_folder_id`` is not provided, the new folder will be created in the
    library's root folder.
    """
    # Get the root folder ID if no base folder ID was provided
    if base_folder_id is None:
        folders = self.show_library(library_id=library_id, contents=True)
        for f in folders:
            if f["name"] == "/":
                base_folder_id = f["id"]
                break
    # Compose the payload
    payload = {}
    payload["name"] = folder_name
    payload["folder_id"] = base_folder_id
    payload["create_type"] = "folder"
    if description is not None:
        payload["description"] = description
    return Client._post(self, payload, id=library_id, contents=True)
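# Usage sketch: create a library, then add a folder under its root folder.
# Per the ``create_library`` docstring the call returns a one-element list,
# hence the ``[0]`` below; names and descriptions are hypothetical.
#
#   lib = lc.create_library('Sequencing runs', description='Raw FASTQ files')
#   folder = lc.create_folder(lib[0]['id'], 'Run 42', description='First batch')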
def get_libraries(self, library_id=None, name=None, deleted=False):
    """
    Get all the libraries, or filter for specific one(s) via the provided
    ``name`` or ``library_id``. Provide only one argument: ``name`` or
    ``library_id``. If multiple libraries match the given ``name``, all of
    them will be returned.

    Return a list of JSON formatted dicts, each containing basic information
    about a library.
    """
    libraries = Client._get(self, deleted=deleted)
    if name is not None or library_id is not None:
        filtered_libs = []
        for lib in libraries:
            if name == lib["name"] or library_id == lib["id"]:
                filtered_libs.append(lib)
                # Library IDs are unique, so stop once the library was found
                if library_id is not None:
                    break
        libraries = filtered_libs
    return libraries
def get_histories(self, history_id=None, name=None, deleted=False):
    """
    Get all histories, or filter for specific one(s) via the provided ``name``
    or ``history_id``. Provide only one argument, ``name`` or ``history_id``,
    but not both.

    If ``deleted`` is set to ``True``, return histories that have been deleted.

    Return a list of history element dicts. If more than one history matches
    the given ``name``, return the list of all the histories with the given
    name.
    """
    histories = Client._get(self, deleted=deleted)
    if name is not None or history_id is not None:
        filtered_hists = []
        for history in histories:
            if name == history['name'] or history_id == history['id']:
                filtered_hists.append(history)
                # History IDs are unique, so stop once the history was found
                if history_id is not None:
                    break
        histories = filtered_hists
    return histories
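# Usage sketch: the two filtering getters above behave the same way. Filtering
# by name may return several entries; filtering by ID returns at most one.
# Names and IDs are hypothetical.
#
#   named_hists = hc.get_histories(name='RNA-seq run 1')
#   one_lib = lc.get_libraries(library_id='f740ab636b360a70')
#   deleted_hists = hc.get_histories(deleted=True)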
def show_dataset(self, dataset_id, deleted=False):
    """
    Display information about and/or content of a dataset. This can be a
    history or a library dataset.
    """
    return Client._get(self, id=dataset_id, deleted=deleted)
def show_user(self, user_id, deleted=False):
    """
    Display information about a user. If ``deleted`` is set to ``True``,
    display information about a deleted user.
    """
    return Client._get(self, id=user_id, deleted=deleted)