Example no. 1
0
    def _archive_composite_dataset(self, trans, data, headers: Headers, do_action='zip'):
        """Stream a composite dataset (primary file plus its extra files) as a compressed archive."""
        # Sanitize the dataset name (truncated to 150 chars) for use as the archive name.
        archive_name = ''.join(c if c in FILENAME_VALID_CHARS else '_' for c in data.name[0:150])
        archive = ZipstreamWrapper(
            archive_name=archive_name,
            upstream_mod_zip=trans.app.config.upstream_mod_zip,
            upstream_gzip=trans.app.config.upstream_gzip
        )
        ext = data.extension
        path = data.file_name
        efp = data.extra_files_path

        # Name the primary file after the dataset, suffixed with its extension.
        display_name = os.path.splitext(archive_name)[0]
        if not display_name.endswith(ext):
            display_name = f'{display_name}_{ext}'

        error, msg = self._archive_main_file(archive, display_name, path)[:2]
        if not error:
            # Add every child file found under extra_files_path to the archive.
            for fpath, rpath in self.__archive_extra_files_path(extra_files_path=efp):
                try:
                    archive.write(fpath, rpath)
                except OSError:
                    error = True
                    log.exception("Unable to add %s to temporary library download archive", rpath)
                    msg = "Unable to create archive for download, please report this error"
                    continue
        if error:
            return trans.show_error_message(msg), headers
        headers.update(archive.get_headers())
        return archive, headers
Example no. 2
0
def stream_dataset_collection(dataset_collection_instance, upstream_mod_zip=False, upstream_gzip=False):
    """Build a streaming zip archive containing every OK-state dataset of a collection."""
    archive = ZipstreamWrapper(
        archive_name=f"{dataset_collection_instance.hid}: {dataset_collection_instance.name}",
        upstream_mod_zip=upstream_mod_zip,
        upstream_gzip=upstream_gzip,
    )
    element_names, element_hdas = get_hda_and_element_identifiers(dataset_collection_instance)
    for element_name, hda in zip(element_names, element_hdas):
        # Skip datasets that are not in a terminal OK state.
        if hda.state != hda.states.OK:
            continue
        for file_path, archive_path in hda.datatype.to_archive(dataset=hda, name=element_name):
            archive.write(file_path, archive_path)
    return archive
Example no. 3
0
 def test_data_download(self, trans: GalaxyWebTransaction, id, **kwd):
     """
     GET /api/tools/{tool_id}/test_data_download?tool_version={tool_version}&filename={filename}

     Stream a single tool test-data file, or a zip archive when the resolved
     path is a directory.

     :raises exceptions.ObjectNotFound: when no ``filename`` parameter was
         supplied or the resolved test-data path does not exist.
     """
     tool_version = kwd.get('tool_version', None)
     tool = self._get_tool(id, tool_version=tool_version, user=trans.user)
     filename = kwd.get("filename")
     if filename is None:
         raise exceptions.ObjectNotFound("Test data filename not specified.")
     path = tool.test_data_path(filename)
     if path:
         if os.path.isfile(path):
             # BUG FIX: the header previously contained a literal placeholder
             # instead of interpolating the requested filename.
             trans.response.headers["Content-Disposition"] = f'attachment; filename="{filename}"'
             return open(path, mode='rb')
         elif os.path.isdir(path):
             # Set upstream_mod_zip to false, otherwise tool data must be among allowed internal routes
             archive = ZipstreamWrapper(
                 upstream_mod_zip=False,
                 upstream_gzip=self.app.config.upstream_gzip,
                 archive_name=filename,
             )
             archive.write(path)
             trans.response.headers.update(archive.get_headers())
             return archive.response()
     raise exceptions.ObjectNotFound("Specified test data path not found.")
Example no. 4
0
    def download(self, trans, archive_format, **kwd):
        """
        GET /api/libraries/datasets/download/{archive_format}
        POST /api/libraries/datasets/download/{archive_format}

        Download requested datasets (identified by encoded IDs) in requested archive_format.

        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``

        .. note:: supported archive_format values are: 'zip', 'tgz', 'tbz', 'uncompressed'

        :param  archive_format:      string representing requested archive archive_format
        :type   archive_format:      string
        :param  ld_ids[]:      an array of encoded dataset ids
        :type   ld_ids[]:      an array
        :param  folder_ids[]:      an array of encoded folder ids
        :type   folder_ids[]:      an array

        :returns: either archive with the requested datasets packed inside or a single uncompressed dataset
        :rtype:   file

        :raises: MessageException, ItemDeletionException, ItemAccessibilityException, HTTPBadRequest, OSError, IOError, ObjectNotFound
        """
        library_datasets = []
        # Accept both the URL-encoded ('ld_ids%5B%5D') and plain ('ld_ids') parameter spellings.
        datasets_to_download = kwd.get('ld_ids%5B%5D', None)
        if datasets_to_download is None:
            datasets_to_download = kwd.get('ld_ids', None)
        if datasets_to_download is not None:
            datasets_to_download = util.listify(datasets_to_download)
            for dataset_id in datasets_to_download:
                try:
                    library_dataset = self.get_library_dataset(trans, id=dataset_id, check_ownership=False, check_accessible=True)
                    library_datasets.append(library_dataset)
                except HTTPBadRequest:
                    raise exceptions.RequestParameterInvalidException('Bad Request.')
                except HTTPInternalServerError:
                    raise exceptions.InternalServerError('Internal error.')
                except Exception as e:
                    raise exceptions.InternalServerError(f"Unknown error.{util.unicodify(e)}")

        folders_to_download = kwd.get('folder_ids%5B%5D', None)
        if folders_to_download is None:
            folders_to_download = kwd.get('folder_ids', None)
        if folders_to_download is not None:
            folders_to_download = util.listify(folders_to_download)

            current_user_roles = trans.get_current_user_roles()

            def traverse(folder):
                # Recursively collect every non-deleted dataset under `folder`
                # that the current user is allowed to access.
                admin = trans.user_is_admin
                rval = []
                for subfolder in folder.active_folders:
                    can_access = admin
                    if not admin:
                        # second tuple element (contained folder ids) is not needed here
                        can_access, _ = trans.app.security_agent.check_folder_contents(trans.user, current_user_roles, subfolder)
                    if can_access and not subfolder.deleted:
                        rval.extend(traverse(subfolder))
                for ld in folder.datasets:
                    can_access = admin
                    if not admin:
                        can_access = trans.app.security_agent.can_access_dataset(
                            current_user_roles,
                            ld.library_dataset_dataset_association.dataset
                        )
                    if can_access and not ld.deleted:
                        rval.append(ld)
                return rval

            for encoded_folder_id in folders_to_download:
                folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
                folder = self.folder_manager.get(trans, folder_id)
                library_datasets.extend(traverse(folder))

        if not library_datasets:
            raise exceptions.RequestParameterMissingException('Request has to contain a list of dataset ids or folder ids to download.')

        if archive_format == 'zip':
            archive = ZipstreamWrapper(
                archive_name="selected_library_files",
                upstream_mod_zip=self.app.config.upstream_mod_zip,
                upstream_gzip=self.app.config.upstream_gzip,
            )
            # Map punctuation/whitespace in composite component names to underscores.
            killme = string.punctuation + string.whitespace
            trantab = str.maketrans(killme, '_' * len(killme))
            seen = set()  # extension-less archive paths already assigned
            for ld in library_datasets:
                ldda = ld.library_dataset_dataset_association
                is_composite = ldda.datatype.composite_type
                # Rebuild the folder path from the library root down to the dataset.
                path = ""
                parent_folder = ldda.library_dataset.folder
                while parent_folder is not None:
                    # Exclude the now-hidden "root folder"
                    if parent_folder.parent is None:
                        path = os.path.join(parent_folder.library_root[0].name, path)
                        break
                    path = os.path.join(parent_folder.name, path)
                    parent_folder = parent_folder.parent
                path += ldda.name
                # BUG FIX: de-duplicate before appending the extension and record
                # the extension-less path; previously `seen` stored paths WITH the
                # extension while the check ran on the extension-less path, so the
                # check never fired and same-named datasets collided in the archive.
                while path in seen:
                    path += '_'
                seen.add(path)
                path = f"{path}.{ldda.extension}"
                zpath = os.path.split(path)[-1]  # comes as base_name/fname
                outfname, zpathext = os.path.splitext(zpath)

                if is_composite:
                    # need to add all the components from the extra_files_path to the zip
                    if zpathext == '':
                        zpath = f'{zpath}.html'  # fake the real nature of the html file
                    try:
                        # BUG FIX: a redundant zip/non-zip check with identical
                        # branches (inside the zip-only branch) was collapsed.
                        archive.write(ldda.dataset.file_name, zpath)  # add the primary of a composite set
                    except OSError:
                        log.exception("Unable to add composite parent %s to temporary library download archive", ldda.dataset.file_name)
                        raise exceptions.InternalServerError("Unable to create archive for download.")
                    except ObjectNotFound:
                        log.exception("Requested dataset %s does not exist on the host.", ldda.dataset.file_name)
                        raise exceptions.ObjectNotFound("Requested dataset not found. ")
                    except Exception as e:
                        log.exception("Unable to add composite parent %s to temporary library download archive", ldda.dataset.file_name)
                        raise exceptions.InternalServerError(f"Unable to add composite parent to temporary library download archive. {util.unicodify(e)}")

                    flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
                    for fpath in flist:
                        efp, fname = os.path.split(fpath)
                        if fname:
                            fname = fname.translate(trantab)
                        try:
                            archive.write(fpath, fname)
                        except OSError:
                            log.exception("Unable to add %s to temporary library download archive %s", fname, outfname)
                            raise exceptions.InternalServerError("Unable to create archive for download.")
                        except ObjectNotFound:
                            log.exception("Requested dataset %s does not exist on the host.", fpath)
                            raise exceptions.ObjectNotFound("Requested dataset not found.")
                        except Exception as e:
                            log.exception("Unable to add %s to temporary library download archive %s", fname, outfname)
                            raise exceptions.InternalServerError(f"Unable to add dataset to temporary library download archive . {util.unicodify(e)}")
                else:
                    try:
                        archive.write(ldda.dataset.file_name, path)
                    except OSError:
                        log.exception("Unable to write %s to temporary library download archive", ldda.dataset.file_name)
                        raise exceptions.InternalServerError("Unable to create archive for download")
                    except ObjectNotFound:
                        log.exception("Requested dataset %s does not exist on the host.", ldda.dataset.file_name)
                        raise exceptions.ObjectNotFound("Requested dataset not found.")
                    except Exception as e:
                        log.exception("Unable to add %s to temporary library download archive %s", ldda.dataset.file_name, outfname)
                        raise exceptions.InternalServerError(f"Unknown error. {util.unicodify(e)}")
            trans.response.headers.update(archive.get_headers())
            return archive.response()
        elif archive_format == 'uncompressed':
            if len(library_datasets) != 1:
                raise exceptions.RequestParameterInvalidException("You can download only one uncompressed file at once.")
            else:
                single_ld = library_datasets[0]
                ldda = single_ld.library_dataset_dataset_association
                dataset = ldda.dataset
                fStat = os.stat(dataset.file_name)
                trans.response.set_content_type(ldda.get_mime())
                trans.response.headers['Content-Length'] = str(fStat.st_size)
                # Sanitize and truncate the download file name.
                fname = f"{ldda.name}.{ldda.extension}"
                fname = ''.join(c if c in util.FILENAME_VALID_CHARS else '_' for c in fname)[0:150]
                trans.response.headers["Content-Disposition"] = f'attachment; filename="{fname}"'
                try:
                    # The open file object is returned for the framework to stream; do not close here.
                    return open(dataset.file_name, 'rb')
                except Exception:
                    raise exceptions.InternalServerError("This dataset contains no content.")
        else:
            raise exceptions.RequestParameterInvalidException("Wrong archive_format parameter specified")