Example #1
    def update_repository(self, id, tar_ball_path, commit_message=None):
        """
        Update the contents of a Tool Shed repository with the specified tarball.

        :type id: str
        :param id: Encoded repository ID

        :type tar_ball_path: str
        :param tar_ball_path: Path to the tarball file to upload.

        :type commit_message: str
        :param commit_message: Commit message used for the underlying Mercurial
          repository backing the Tool Shed repository.

        :rtype: dict
        :return: A dictionary that includes repository content warnings.
          Most valid uploads will result in no such warnings; an exception
          is generally raised if there are problems.
          For example, a successful upload will look like::

            {'content_alert': '',
             'message': ''}

        .. versionadded:: 0.5.2
        """
        url = self._make_url(id) + '/changeset_revision'
        payload = {
            'file': attach_file(tar_ball_path)
        }
        if commit_message is not None:
            payload['commit_message'] = commit_message
        try:
            return self._post(payload=payload, files_attached=True, url=url)
        finally:
            payload['file'].close()
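
A minimal usage sketch for the method above, assuming it is exposed on a BioBlend-style Tool Shed client (e.g. ``ToolShedInstance(...).repositories``); the server URL, API key, repository ID, and tarball path are placeholders.

from bioblend.toolshed import ToolShedInstance

ts = ToolShedInstance(url="https://toolshed.example.org", key="<api-key>")  # placeholders
result = ts.repositories.update_repository(
    id="0123456789abcdef",           # encoded repository ID (placeholder)
    tar_ball_path="my_tool.tar.gz",  # tarball with the updated repository contents
    commit_message="Fix parameter validation",
)
print(result.get("message", ""))     # empty string on a clean upload
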
Example #2
    def upload_file(self, path, history_id, **keywords):
        """
        Upload the file specified by ``path`` to the history specified by
        ``history_id``.

        :type path: str
        :param path: path of the file to upload

        :type history_id: str
        :param history_id: id of the history where to upload the file

        :type file_name: str
        :param file_name: (optional) name of the new history dataset

        :type file_type: str
        :param file_type: (optional) Galaxy datatype for the new dataset, default is auto

        :type dbkey: str
        :param dbkey: (optional) genome dbkey
        """
        if "file_name" not in keywords:
            keywords["file_name"] = basename(path)
        payload = self._upload_payload(history_id, **keywords)
        payload["files_0|file_data"] = attach_file(path, name=keywords["file_name"])
        try:
            return self._tool_post(payload, files_attached=True)
        finally:
            payload["files_0|file_data"].close()
Example #3
    def update_repository(self, id, tar_ball_path, commit_message=None):
        """
        Update the contents of a Tool Shed repository with the specified tarball.

        :type id: str
        :param id: Encoded repository ID

        :type tar_ball_path: str
        :param tar_ball_path: Path to the tarball file to upload.

        :type commit_message: str
        :param commit_message: Commit message used for the underlying Mercurial
          repository backing the Tool Shed repository.

        :rtype: dict
        :return: A dictionary that includes repository content warnings.
          Most valid uploads will result in no such warnings; an exception
          is generally raised if there are problems.
          For example, a successful upload will look like::

            {'content_alert': '',
             'message': ''}

        .. versionadded:: 0.5.2
        """
        url = '/'.join([self.gi._make_url(self, id), 'changeset_revision'])
        payload = {
            'file': attach_file(tar_ball_path)
        }
        if commit_message is not None:
            payload['commit_message'] = commit_message
        try:
            return self._post(payload=payload, files_attached=True, url=url)
        finally:
            payload['file'].close()
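
``update_repository`` expects the tarball to already exist on disk. A small standard-library sketch of how such an archive might be built from a tool directory before calling the method (the directory and archive names are placeholders):

import os
import tarfile


def make_repository_tarball(tool_dir, out_path="repository.tar.gz"):
    """Pack the contents of a tool directory into a gzipped tarball."""
    with tarfile.open(out_path, "w:gz") as tar:
        for entry in sorted(os.listdir(tool_dir)):
            # keep archive members relative to the directory root
            tar.add(os.path.join(tool_dir, entry), arcname=entry)
    return out_path


tarball = make_repository_tarball("my_tool")  # placeholder directory
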
Example #4
 def _attach_file(upload_payload, uri, index=0):
     # Normalise plain filesystem paths into file:// URIs.
     uri = path_or_uri_to_uri(uri)
     is_path = uri.startswith("file://")
     if not is_path or config.use_path_paste:
         # Remote URI, or the server may read local paths directly: pass the
         # URI through as a url_paste input.
         upload_payload["inputs"]["files_%d|url_paste" % index] = uri
     else:
         # Local file that must be sent over the wire: attach its contents and
         # flag the attachment via the mutable `files_attached` list, which is
         # expected to come from an enclosing scope (see the sketch below).
         files_attached[0] = True
         path = uri[len("file://"):]
         upload_payload["files_%d|file_data" % index] = attach_file(path)
Example #5
    def _do_upload(self, **keywords):
        """
        Set up the POST request and do the actual data upload to a data library.
        This method should not be called directly but instead refer to the methods
        specific for the desired type of data upload.
        """
        library_id = keywords['library_id']
        folder_id = keywords.get('folder_id', None)
        if folder_id is None:
            folder_id = self._get_root_folder_id(library_id)
        files_attached = False
        # Compose the payload dict
        payload = {}
        payload['folder_id'] = folder_id
        payload['file_type'] = keywords.get('file_type', 'auto')
        payload['dbkey'] = keywords.get('dbkey', '?')
        payload['create_type'] = 'file'
        if keywords.get("roles", None):
            payload["roles"] = keywords["roles"]
        if keywords.get("link_data_only", None) and keywords['link_data_only'] != 'copy_files':
            payload["link_data_only"] = 'link_to_files'
        if keywords.get('remote_dataset', None):
            payload['remote_dataset'] = keywords['remote_dataset']
        if keywords.get('uuid_list', None):
            payload['uuid_list'] = keywords['uuid_list']
        if keywords.get('remote_dataset_type_list', None):
            payload['remote_dataset_type_list'] = keywords['remote_dataset_type_list']
        if keywords.get('file_size_list', None):
            payload['file_size_list'] = keywords['file_size_list']
        if keywords.get('line_count_list', None):
            payload['line_count_list'] = keywords['line_count_list']
        # upload options
        if keywords.get('file_url', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|url_paste'] = keywords['file_url']
        elif keywords.get('pasted_content', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|url_paste'] = keywords['pasted_content']
        elif keywords.get('server_dir', None) is not None:
            payload['upload_option'] = 'upload_directory'
            payload['server_dir'] = keywords['server_dir']
        elif keywords.get('file_local_path', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|file_data'] = attach_file(keywords['file_local_path'])
            files_attached = True
        elif keywords.get("filesystem_paths", None) is not None:
            payload["upload_option"] = "upload_paths"
            payload["filesystem_paths"] = keywords["filesystem_paths"]

        try:
            return Client._post(self, payload, id=library_id, contents=True,
                                files_attached=files_attached)
        finally:
            if payload.get('files_0|file_data', None) is not None:
                payload['files_0|file_data'].close()
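
``_do_upload`` is an internal dispatcher; callers are expected to go through thin public wrappers that set exactly one of the upload options. A hypothetical sketch of such a wrapper for the local-path case (the method name and its keyword defaults are assumptions):

    def upload_file_from_local_path(self, library_id, file_local_path,
                                    folder_id=None, file_type='auto', dbkey='?'):
        """Hypothetical wrapper: collect keyword arguments and delegate to _do_upload."""
        return self._do_upload(library_id=library_id,
                               file_local_path=file_local_path,
                               folder_id=folder_id,
                               file_type=file_type,
                               dbkey=dbkey)
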
Example #6
    def _do_upload(self, library_id, **keywords):
        """
        Set up the POST request and do the actual data upload to a data library.
        This method should not be called directly but instead refer to the
        methods specific for the desired type of data upload.
        """
        folder_id = keywords.get('folder_id', None)
        if folder_id is None:
            folder_id = self._get_root_folder_id(library_id)
        files_attached = False
        # Compose the payload dict
        payload = {}
        payload['folder_id'] = folder_id
        payload['file_type'] = keywords.get('file_type', 'auto')
        payload['dbkey'] = keywords.get('dbkey', '?')
        payload['create_type'] = 'file'
        if keywords.get("roles", None):
            payload["roles"] = keywords["roles"]
        if keywords.get("link_data_only",
                        None) and keywords['link_data_only'] != 'copy_files':
            payload["link_data_only"] = 'link_to_files'
        payload['tag_using_filenames'] = keywords.get('tag_using_filenames',
                                                      False)
        if keywords.get('tags'):
            payload['tags'] = keywords['tags']
        payload['preserve_dirs'] = keywords.get('preserve_dirs', False)
        # upload options
        if keywords.get('file_url', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|url_paste'] = keywords['file_url']
        elif keywords.get('pasted_content', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|url_paste'] = keywords['pasted_content']
        elif keywords.get('server_dir', None) is not None:
            payload['upload_option'] = 'upload_directory'
            payload['server_dir'] = keywords['server_dir']
        elif keywords.get('file_local_path', None) is not None:
            payload['upload_option'] = 'upload_file'
            payload['files_0|file_data'] = attach_file(
                keywords['file_local_path'])
            files_attached = True
        elif keywords.get("filesystem_paths", None) is not None:
            payload["upload_option"] = "upload_paths"
            payload["filesystem_paths"] = keywords["filesystem_paths"]

        try:
            return self._post(payload,
                              id=library_id,
                              contents=True,
                              files_attached=files_attached)
        finally:
            if payload.get('files_0|file_data', None) is not None:
                payload['files_0|file_data'].close()
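
A call sketch exercising the options this newer variant adds (``tags``, ``tag_using_filenames``, ``preserve_dirs``), assuming a BioBlend-style library client; the wrapper method name and accepted keywords, as well as the server URL, key, and paths, are assumptions/placeholders.

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")  # placeholders
library = gi.libraries.create_library(name="reference data")
gi.libraries.upload_file_from_server(  # assumption: wrapper that delegates to _do_upload
    library_id=library["id"],
    server_dir="reference/hg38",       # directory readable by the Galaxy server
    preserve_dirs=True,                # keep the directory layout in the library
    tag_using_filenames=True,          # tag each dataset with its file name
    tags=["hg38", "reference"],
)
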
Example #7
    def import_history(self, file_path=None, url=None):
        """
        Import a history from an archive on disk or a URL.

        :type file_path: str
        :param file_path: Path to an exported history archive on disk.

        :type url: str
        :param url: URL of an exported history archive. Only one of
          ``file_path`` and ``url`` should be given.
        """
        if file_path:
            archive_file = attach_file(file_path)
            payload = dict(archive_source='', archive_file=archive_file, archive_type="file")
        else:
            payload = dict(archive_source=url, archive_type='url')

        return self._post(payload=payload, files_attached=file_path is not None)
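
A brief usage sketch covering both import paths (local archive vs. URL), assuming this method sits on a histories client such as BioBlend's ``gi.histories``; the server URL, key, and archive locations are placeholders.

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="<api-key>")  # placeholders

# Import from an archive previously exported to disk ...
gi.histories.import_history(file_path="exported_history.tar.gz")

# ... or directly from a URL serving such an archive.
gi.histories.import_history(url="https://example.org/shared/history.tar.gz")
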
Example #8
    def upload_file(self, path, history_id, **keywords):
        """
        Upload the file specified by ``path`` to the history specified by
        ``history_id``.

        :type path: str
        :param path: path of the file to upload

        :type history_id: str
        :param history_id: id of the history where to upload the file

        :type file_name: str
        :param file_name: (optional) name of the new history dataset

        :type file_type: str
        :param file_type: (optional) Galaxy datatype for the new dataset, default is auto

        :type dbkey: str
        :param dbkey: (optional) genome dbkey

        :type to_posix_lines: bool
        :param to_posix_lines: if ``True`` (the default), convert universal line
          endings to POSIX line endings. Set to ``False`` when uploading a gzip,
          bz2 or zip archive containing a binary file

        :type space_to_tab: bool
        :param space_to_tab: whether to convert spaces to tabs. Default is
          ``False``. Applicable only if ``to_posix_lines`` is ``True``

        :rtype: dict
        :return: Information about the created upload job
        """
        if "file_name" not in keywords:
            keywords["file_name"] = basename(path)
        payload = self._upload_payload(history_id, **keywords)
        payload["files_0|file_data"] = attach_file(path,
                                                   name=keywords["file_name"])
        try:
            return self._post(payload, files_attached=True)
        finally:
            payload["files_0|file_data"].close()
Example #9
    def upload_file(self, path, history_id, **keywords):
        """
        Upload the file specified by ``path`` to the history specified by
        ``history_id``.

        :type path: str
        :param path: path of the file to upload

        :type history_id: str
        :param history_id: id of the history where to upload the file

        :type file_name: str
        :param file_name: (optional) name of the new history dataset

        :type file_type: str
        :param file_type: (optional) Galaxy datatype for the new dataset, default is auto

        :type dbkey: str
        :param dbkey: (optional) genome dbkey

        :type to_posix_lines: bool
        :param to_posix_lines: if ``True`` (the default), convert universal line
          endings to POSIX line endings. Set to ``False`` when uploading a gzip,
          bz2 or zip archive containing a binary file

        :type space_to_tab: bool
        :param space_to_tab: whether to convert spaces to tabs. Default is
          ``False``. Applicable only if ``to_posix_lines`` is ``True``

        :rtype: dict
        :return: Information about the created upload job
        """
        if "file_name" not in keywords:
            keywords["file_name"] = basename(path)
        payload = self._upload_payload(history_id, **keywords)
        payload["files_0|file_data"] = attach_file(path, name=keywords["file_name"])
        try:
            return self._post(payload, files_attached=True)
        finally:
            payload["files_0|file_data"].close()
Example #10
 def _attach_file(self, path):
     # Delegate to the shared attach_file helper used for multipart uploads.
     return attach_file(path)
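
Finally, the ``attach_file`` helper used throughout these snippets is not shown either. A minimal, hypothetical sketch of what such a helper could look like, consistent with how it is used above (opened for binary reading, carries a display name, and closed by the caller); this is an assumption, not the library's actual implementation:

from os.path import basename


class _Attachment:
    """Hypothetical file wrapper for multipart uploads; closed by the caller."""

    def __init__(self, path, name=None):
        self.name = name or basename(path)  # file name reported in the multipart form
        self.fileobj = open(path, "rb")

    def read(self, *args):
        return self.fileobj.read(*args)

    def close(self):
        self.fileobj.close()


def attach_file(path, name=None):
    return _Attachment(path, name)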