def _do_upload(self, **keywords):
    """
    Set up the POST request and do the actual data upload to a data library.

    This method should not be called directly but instead refer to the
    methods specific for the desired type of data upload.

    Recognized keywords: ``library_id`` (required), ``folder_id``,
    ``file_type``, ``dbkey``, and exactly one of ``file_url``,
    ``pasted_content`` or ``server_dir``.

    :raises LookupError: if no ``folder_id`` was given and the library's
        root folder ("/") cannot be found. (The original code left
        ``folder_id`` unbound in that case, causing a ``NameError``.)
    """
    folder_id = keywords.get("folder_id", None)
    if folder_id is None:
        # No destination folder given: fall back to the library's root folder
        folders = self.show_library(library_id=keywords["library_id"], contents=True)
        for f in folders:
            if f["name"] == "/":
                folder_id = f["id"]
                break
        else:
            raise LookupError("Root folder ('/') not found in library %s"
                              % keywords["library_id"])
    # Compose the payload dict
    payload = {
        "folder_id": folder_id,
        "file_type": keywords.get("file_type", "auto"),
        "dbkey": keywords.get("dbkey", "?"),
        "create_type": "file",
    }
    # Both a remote URL and pasted content go through the url_paste field
    if keywords.get("file_url", None) is not None:
        payload["upload_option"] = "upload_file"
        payload["files_0|url_paste"] = keywords["file_url"]
    elif keywords.get("pasted_content", None) is not None:
        payload["upload_option"] = "upload_file"
        payload["files_0|url_paste"] = keywords["pasted_content"]
    elif keywords.get("server_dir", None) is not None:
        payload["upload_option"] = "upload_directory"
        payload["server_dir"] = keywords["server_dir"]
    return Client._post(self, payload, id=keywords["library_id"], contents=True)
def undelete_history(self, history_id):
    """
    Undelete the history identified by ``history_id``.
    """
    base_url = self.gi._make_url(self, history_id, deleted=True)
    # The undelete action is exposed at <history_url>/undelete
    undelete_url = '/'.join([base_url, 'undelete'])
    return Client._post(self, payload={}, url=undelete_url)
def upload_dataset_from_library(self, history_id, lib_dataset_id):
    """
    Upload a dataset into the history from a library.

    Requires the library dataset ID, which can be obtained from the
    library contents.
    """
    request_body = {'from_ld_id': lib_dataset_id}
    return Client._post(self, request_body, id=history_id, contents=True)
def create_history(self, name=None):
    """
    Create a new history, optionally setting the ``name``.
    """
    # Only send a name when one was actually supplied
    payload = {} if name is None else {'name': name}
    return Client._post(self, payload)
def run_workflow(self, workflow_id, dataset_map, history_id=None, history_name=None, import_inputs_to_history=False):
    """
    Run the workflow identified by ``workflow_id``

    :type workflow_id: string
    :param workflow_id: Encoded workflow ID

    :type dataset_map: string or dict
    :param dataset_map: A mapping of workflow inputs to datasets. The datasets
        source can be a LibraryDatasetDatasetAssociation (``ldda``),
        LibraryDataset (``ld``), or HistoryDatasetAssociation (``hda``).
        The map must be in the following format:
        ``{'<input>': {'id': <encoded dataset ID>, 'src': '[ldda, ld, hda]'}}``
        (eg, ``{'23': {'id': '29beef4fadeed09f', 'src': 'ld'}}``)

    :type history_id: string
    :param history_id: The encoded history ID where to store the workflow
        output. ``history_id`` OR ``history_name`` should be provided but not
        both!

    :type history_name: string
    :param history_name: Create a new history with the given name to store the
        workflow output. ``history_id`` OR ``history_name`` should be provided
        but not both!

    :type import_inputs_to_history: bool
    :param import_inputs_to_history: If ``True``, used workflow inputs will be
        imported into the history. If ``False``, only workflow outputs will be
        visible in the given history.

    :raises ValueError: if neither ``history_id`` nor ``history_name`` is
        provided. (The original code merely printed a message and went on to
        send a malformed request.)

    :rtype: dict
    :return: A dict containing the history ID where the outputs are placed as
        well as output dataset IDs. For example::

            {u'history': u'64177123325c9cfd',
             u'outputs': [u'aa4d3084af404259']}
    """
    if not history_id and not history_name:
        raise ValueError("Must provide history_id or history_name argument")
    payload = {'workflow_id': workflow_id, 'ds_map': dataset_map}
    if history_id:
        payload['history'] = 'hist_id={0}'.format(history_id)
    else:
        payload['history'] = history_name
    if import_inputs_to_history is False:
        payload['no_add_to_history'] = True
    return Client._post(self, payload)
def create_user(self, user_email):
    """
    Create a new Galaxy user.

    .. note::
        For this method to work, the Galaxy instance must have
        ``allow_user_creation`` and ``use_remote_user`` options set to
        ``True`` in the ``universe_wsgi.ini`` configuration file. Also
        note that setting ``use_remote_user`` will require an upstream
        authentication proxy server; however, if you do not have one,
        access to Galaxy via a browser will not be possible.
    """
    return Client._post(self, {"remote_user_email": user_email})
def create_library(self, name, description=None, synopsis=None):
    """
    Create a data library with the properties defined in the arguments.
    Return a list of JSON dicts, looking like so::

        [{"id": "f740ab636b360a70",
          "name": "Library from blend",
          "url": "/api/libraries/f740ab636b360a70"}]
    """
    payload = {"name": name}
    # Optional fields are included only when they are truthy
    for key, value in (("description", description), ("synopsis", synopsis)):
        if value:
            payload[key] = value
    return Client._post(self, payload)
def create_folder(self, library_id, folder_name, description=None, base_folder_id=None):
    """
    Create a folder in the given library and the base folder. If
    ``base_folder_id`` is not provided, the new folder will be created
    in the root folder.
    """
    if base_folder_id is None:
        # No parent given: look up the library's root folder ("/")
        for item in self.show_library(library_id=library_id, contents=True):
            if item["name"] == "/":
                base_folder_id = item["id"]
                break
    payload = {
        "name": folder_name,
        "folder_id": base_folder_id,
        "create_type": "folder",
    }
    if description is not None:
        payload["description"] = description
    return Client._post(self, payload, id=library_id, contents=True)