Example #1
    def upload_chunk(self, file_uid, chunk_index, secret, chunk, checksum):
        """Upload a chunk of the file with UID 'file_uid'. This is the
           chunk at index 'chunk_idx', which is set equal to 'chunk'
           (validated with 'checksum'). The passed secret is used to
           authenticate this upload. The secret should be the
           multi_md5 has of the shared secret with the concatenated
           drive_uid, file_uid and chunk_index
        """
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)
        data = _ObjectStore.get_object_from_json(bucket, key)
        shared_secret = data["secret"]

        from Acquire.Crypto import Hash as _Hash

        # chunk_index may arrive as a string, so convert it before it
        # is used in the '%d' format below and in the object keys later
        chunk_index = int(chunk_index)
        shared_secret = _Hash.multi_md5(
            shared_secret, "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

        if secret != shared_secret:
            raise PermissionError(
                "Invalid chunked upload secret. You do not have permission "
                "to upload chunks to this file!")

        # validate the data checksum
        check = _Hash.md5(chunk)

        if check != checksum:
            from Acquire.Storage import FileValidationError
            raise FileValidationError(
                "Invalid checksum for chunk: %s versus %s" % (check, checksum))

        meta = {
            "filesize": len(chunk),
            "checksum": checksum,
            "compression": "bz2"
        }

        file_key = data["filekey"]

        file_bucket = self._get_file_bucket(file_key)
        data_key = "%s/data/%d" % (file_key, chunk_index)
        meta_key = "%s/meta/%d" % (file_key, chunk_index)

        _ObjectStore.set_object_from_json(file_bucket, meta_key, meta)
        _ObjectStore.set_object(file_bucket, data_key, chunk)
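
Below is a minimal client-side sketch of how the arguments to upload_chunk could
be prepared. It only relies on the Hash.multi_md5 and Hash.md5 calls that appear
in the server code above; build_chunk_upload_args itself is a hypothetical helper
written for illustration and is not part of the Acquire API.

from Acquire.Crypto import Hash


def build_chunk_upload_args(shared_secret, drive_uid, file_uid,
                            chunk_index, chunk):
    """Hypothetical helper that derives the per-chunk secret and
       checksum exactly as upload_chunk above expects them
    """
    chunk_index = int(chunk_index)

    # per-chunk secret: multi_md5 of the shared secret with the
    # concatenated drive_uid, file_uid and chunk_index
    secret = Hash.multi_md5(
        shared_secret, "%s%s%d" % (drive_uid, file_uid, chunk_index))

    # checksum against which the server validates the chunk data
    checksum = Hash.md5(chunk)

    return {"file_uid": file_uid,
            "chunk_index": chunk_index,
            "secret": secret,
            "chunk": chunk,
            "checksum": checksum}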
Example #2
    def upload(self,
               filehandle,
               authorisation=None,
               encrypt_key=None,
               par=None,
               identifiers=None):
        """Upload the file associated with the passed filehandle.
           If the filehandle has the data embedded, then this uploads
           the file data directly and returns a FileMeta for the
           result. Otherwise, this returns a PAR which should
           be used to upload the data. The PAR will be encrypted
           using 'encrypt_key'. Remember to close the PAR once the
           file has been uploaded, so that it can be validated
           as correct.
        """
        from Acquire.Storage import FileHandle as _FileHandle
        from Acquire.Storage import FileInfo as _FileInfo
        from Acquire.Crypto import PublicKey as _PublicKey
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import string_to_encoded \
            as _string_to_encoded

        if not isinstance(filehandle, _FileHandle):
            raise TypeError("The filehandle must be of type FileHandle")

        if encrypt_key is not None:
            if not isinstance(encrypt_key, _PublicKey):
                raise TypeError("The encryption key must be of type PublicKey")

        (drive_acl, identifiers) = self._resolve_acl(
            authorisation=authorisation,
            resource="upload %s" % filehandle.fingerprint(),
            par=par,
            identifiers=identifiers)

        if not drive_acl.is_writeable():
            raise PermissionError(
                "You do not have permission to write to this drive. "
                "Your permissions are %s" % str(drive_acl))

        # now generate a FileInfo for this FileHandle
        fileinfo = _FileInfo(drive_uid=self._drive_uid,
                             filehandle=filehandle,
                             identifiers=identifiers,
                             upstream=drive_acl)

        # resolve the ACL for the file from this FileHandle
        filemeta = fileinfo.get_filemeta()
        file_acl = filemeta.acl()

        if not file_acl.is_writeable():
            raise PermissionError(
                "Despite having write permission to the drive, you "
                "do not have write permission for the file. Your file "
                "permissions are %s" % str(file_acl))

        file_key = fileinfo.latest_version()._file_key()
        file_bucket = self._get_file_bucket(file_key)

        filedata = None

        if filehandle.is_localdata():
            # the filehandle already contains the file, so save it
            # directly
            filedata = filehandle.local_filedata()

        # save the data now if it was embedded in the filehandle
        # (filedata is None otherwise, and the data will instead be
        # uploaded through the OSPar created below)
        _ObjectStore.set_object(bucket=file_bucket,
                                key=file_key,
                                data=filedata)

        if filedata is None:
            # the file is too large to include in the filehandle, so
            # we need to use an OSPar to upload the data
            from Acquire.ObjectStore import Function as _Function

            f = _Function(function=_validate_file_upload,
                          file_bucket=self._get_file_bucketname(),
                          file_key=file_key,
                          objsize=fileinfo.filesize(),
                          checksum=fileinfo.checksum())

            ospar = _ObjectStore.create_par(bucket=file_bucket,
                                            encrypt_key=encrypt_key,
                                            key=file_key,
                                            readable=False,
                                            writeable=True,
                                            cleanup_function=f)
        else:
            ospar = None

        # now save the fileinfo to the object store
        fileinfo.save()
        filemeta = fileinfo.get_filemeta()

        # also return the OSPar in case a second stage of upload is needed
        return (filemeta, ospar)
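
Below is a minimal caller-side sketch of the two-stage flow described in the
docstring of upload. Only the (filemeta, ospar) return shape comes from the code
above; upload_and_finalise is a hypothetical helper, and the ospar.write and
ospar.close calls are assumed names for the OSPar write API.

def upload_and_finalise(drive, filehandle, filename,
                        authorisation=None, encrypt_key=None):
    """Hypothetical caller showing the two-stage flow: small files are
       uploaded directly from the filehandle, larger files are pushed
       through the returned OSPar, which must then be closed so the
       cleanup function can validate the uploaded size and checksum
    """
    (filemeta, ospar) = drive.upload(filehandle=filehandle,
                                     authorisation=authorisation,
                                     encrypt_key=encrypt_key)

    if ospar is not None:
        # second stage - push the file data through the PAR; 'write'
        # and 'close' are assumed names for the OSPar API here
        ospar.write(filename)
        ospar.close()

    return filemeta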