def close_uploader(self, file_uid, secret):
    """Close the uploader associated with the passed file_uid,
       authenticated using the passed secret

       Args:
           file_uid (str): UID of the file whose uploader should close
           secret (str): shared secret stored when the uploader was
                         registered

       Raises:
           PermissionError: if 'secret' does not match the stored secret
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except Exception:
        # narrowed from a bare 'except:' so that SystemExit and
        # KeyboardInterrupt are not swallowed
        data = None

    if data is None:
        # the uploader has already been closed
        return

    shared_secret = data["secret"]

    if secret != shared_secret:
        raise PermissionError(
            "Invalid request - you do not have permission to "
            "close this uploader")

    # 'take' removes the key from the store, so only one caller
    # can win the race to actually close the uploader
    try:
        data2 = _ObjectStore.take_object_from_json(bucket, key)
    except Exception:
        data2 = None

    if data2 is None:
        # someone else is already in the process of closing
        # this uploader - let them do it!
        return

    filename = data["filename"]
    version = data["version"]

    # now get the FileInfo for this file
    from Acquire.Storage import FileInfo as _FileInfo
    fileinfo = _FileInfo.load(drive=self,
                              filename=filename,
                              version=version)

    file_key = data["filekey"]
    file_bucket = self._get_file_bucket(file_key)
    fileinfo.close_uploader(file_bucket=file_bucket)
    fileinfo.save()
def list_versions(self, filename, authorisation=None,
                  include_metadata=False, par=None,
                  identifiers=None):
    """Return the list of versions of the file with specified
       filename. If 'include_metadata' is true then this will
       load full metadata for each version. This will return
       a sorted list of FileMeta objects. The passed authorisation
       is needed in case the version info is not public
    """
    (drive_acl, identifiers) = self._resolve_acl(
        authorisation=authorisation,
        resource="list_versions %s" % filename,
        par=par, identifiers=identifiers)

    if not drive_acl.is_readable():
        raise PermissionError(
            "You don't have permission to read this Drive")

    from Acquire.Storage import FileInfo as _FileInfo
    versions = _FileInfo.list_versions(
        drive=self, filename=filename,
        identifiers=identifiers, upstream=drive_acl,
        include_metadata=include_metadata)

    visible = []

    for candidate in versions:
        rules = candidate.aclrules()

        if rules is None:
            # no per-version ACL rules - include the version
            visible.append(candidate)
            continue

        acl = rules.resolve(upstream=drive_acl,
                            identifiers=identifiers,
                            must_resolve=True,
                            unresolved=False)

        if acl.is_readable() or acl.is_writeable():
            visible.append(candidate)

    # return the versions sorted in upload order
    visible.sort(key=lambda item: item.uploaded_when())

    return visible
def list_files(self, authorisation=None, par=None,
               identifiers=None, include_metadata=False,
               dir=None, filename=None):
    """Return the list of FileMeta data for the files contained
       in this Drive. The passed authorisation is needed in case
       the list contents of this drive is not public

       If 'dir' is specified, then only search for files in 'dir'.
       If 'filename' is specified, then only search for the
       file called 'filename'

       Raises:
           PermissionError: if the caller cannot read this Drive
    """
    (drive_acl, identifiers) = self._resolve_acl(
        authorisation=authorisation,
        resource="list_files",
        par=par, identifiers=identifiers)

    if par is not None:
        # a PAR scoped to a single file restricts the listing
        # to just that file
        if par.location().is_file():
            dir = None
            filename = par.location().filename()
        elif not par.location().is_drive():
            raise PermissionError(
                "You do not have permission to read the Drive")

    if not drive_acl.is_readable():
        raise PermissionError(
            "You don't have permission to read this Drive")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string \
        as _encoded_to_string
    from Acquire.ObjectStore import string_to_encoded \
        as _string_to_encoded
    from Acquire.Storage import FileMeta as _FileMeta

    metadata_bucket = self._get_metadata_bucket()

    if filename is not None:
        if dir is not None:
            filename = "%s/%s" % (dir, filename)

        key = "%s/%s/%s" % (_fileinfo_root, self._drive_uid,
                            _string_to_encoded(filename))

        names = [key]
    elif dir is not None:
        while dir.endswith("/"):
            dir = dir[0:-1]

        encoded_dir = _string_to_encoded(dir)

        # strip base64 padding as it cannot appear mid-key
        while encoded_dir.endswith("="):
            encoded_dir = encoded_dir[0:-1]

        # remove the last two characters, as sometime uuencoding
        # will change the last characters so they don't match
        if len(encoded_dir) > 2:
            encoded_dir = encoded_dir[0:-2]
        else:
            encoded_dir = ""

        key = "%s/%s/%s" % (_fileinfo_root, self._drive_uid,
                            encoded_dir)

        all_names = _ObjectStore.get_all_object_names(
            metadata_bucket, key)

        # the prefix scan above is approximate, so confirm each
        # match by decoding the real filename
        names = []
        dir = "%s/" % dir

        for name in all_names:
            decoded_name = _encoded_to_string(name.split("/")[-1])

            if decoded_name.startswith(dir):
                names.append(name)
    else:
        key = "%s/%s" % (_fileinfo_root, self._drive_uid)
        names = _ObjectStore.get_all_object_names(
            metadata_bucket, key)

    files = []

    if include_metadata:
        # we need to load all of the metadata info for this file to
        # return to the user
        from Acquire.Storage import FileInfo as _FileInfo

        for name in names:
            try:
                data = _ObjectStore.get_object_from_json(
                    metadata_bucket, name)
                fileinfo = _FileInfo.from_data(
                    data, identifiers=identifiers,
                    upstream=drive_acl)
                filemeta = fileinfo.get_filemeta()
                file_acl = filemeta.acl()

                if file_acl.is_readable() or file_acl.is_writeable():
                    files.append(filemeta)
            except Exception:
                # deliberately best-effort: skip entries that cannot
                # be loaded (narrowed from a bare 'except:')
                pass
    else:
        for name in names:
            filename = _encoded_to_string(name.split("/")[-1])
            files.append(_FileMeta(filename=filename))

    return files
def download(self, filename, authorisation, version=None,
             encrypt_key=None, force_par=False, must_chunk=False,
             par=None, identifiers=None):
    """Download the file called filename. This will return a
       FileHandle that describes the file. If the file is
       sufficiently small, then the filedata will be embedded
       into this handle. Otherwise a PAR will be generated and
       also returned to allow the file to be downloaded
       separately. The PAR will be encrypted with 'encrypt_key'.
       Remember to close the PAR once you have finished
       downloading the file...

       Returns:
           tuple: (filemeta, filedata, ospar, downloader) - exactly
           one of filedata / ospar / downloader is not None

       Raises:
           TypeError: if 'encrypt_key' is not a PublicKey
           PermissionError: if the file is not readable, or if a
               chunked download is demanded but not possible
    """
    from Acquire.Storage import FileInfo as _FileInfo
    from Acquire.Crypto import PublicKey as _PublicKey
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    if not isinstance(encrypt_key, _PublicKey):
        raise TypeError("The encryption key must be of type PublicKey")

    (drive_acl, identifiers) = self._resolve_acl(
        authorisation=authorisation,
        resource="download %s %s" % (self._drive_uid, filename),
        par=par, identifiers=identifiers)

    # even if the drive_acl is not readable by this user, they
    # may have read permission for the file...

    # now get the FileInfo for this FileHandle
    fileinfo = _FileInfo.load(drive=self,
                              filename=filename,
                              version=version,
                              identifiers=identifiers,
                              upstream=drive_acl)

    # resolve the ACL for the file from this FileHandle
    filemeta = fileinfo.get_filemeta()
    file_acl = filemeta.acl()

    if not file_acl.is_readable():
        raise PermissionError(
            "You do not have read permissions for the file. Your file "
            "permissions are %s" % str(file_acl))

    file_key = fileinfo.version()._file_key()

    # BUGFIX: pass 'file_key' to _get_file_bucket so that we read
    # from the same bucket the file was uploaded to - close_uploader
    # and download_chunk both call self._get_file_bucket(file_key)
    file_bucket = self._get_file_bucket(file_key)

    filedata = None
    downloader = None
    ospar = None

    if fileinfo.version().is_chunked():
        # this is a chunked file. We need to return a
        # ChunkDownloader to download the file
        from Acquire.Client import ChunkDownloader as _ChunkDownloader
        downloader = _ChunkDownloader(
            drive_uid=self._drive_uid,
            file_uid=fileinfo.version().uid())

        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()

        # register the downloader so that download_chunk can
        # authenticate subsequent chunk requests
        key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                               filemeta.uid(), downloader.uid())

        data = {"filename": filename,
                "version": filemeta.uid(),
                "filekey": fileinfo.version()._file_key(),
                "secret": downloader.secret()}

        _ObjectStore.set_object_from_json(bucket, key, data)
    elif must_chunk:
        raise PermissionError(
            "Cannot download this file in a chunked manner!")
    elif force_par or fileinfo.filesize() > 1048576:
        # the file is too large to include in the download so
        # we need to use a OSPar to download
        ospar = _ObjectStore.create_par(bucket=file_bucket,
                                        encrypt_key=encrypt_key,
                                        key=file_key,
                                        readable=True,
                                        writeable=False)
    else:
        # one-trip download of files that are less than 1 MB
        filedata = _ObjectStore.get_object(file_bucket, file_key)

    # return the filemeta, and either the filedata, ospar or downloader
    return (filemeta, filedata, ospar, downloader)
def download_chunk(self, file_uid, downloader_uid,
                   chunk_index, secret):
    """Download a chunk of the file with UID 'file_uid' at
       chunk index 'chunk_index'. This request is authenticated
       with the passed secret. The secret should be the
       multi_md5 hash of the shared secret with the concatenated
       drive_uid, file_uid and chunk_index

       Returns:
           tuple: (chunk, meta, num_chunks) - chunk and meta are
           None once the end of the file has been reached

       Raises:
           PermissionError: if there is no registered downloader
               or the secret is wrong
           IndexError: if 'chunk_index' is out of range
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                           file_uid, downloader_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except Exception:
        data = None

    if data is None:
        raise PermissionError(
            "There is no downloader available to let you download "
            "this chunked file!")

    shared_secret = data["secret"]

    from Acquire.Crypto import Hash as _Hash
    shared_secret = _Hash.multi_md5(
        shared_secret,
        "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

    if secret != shared_secret:
        # BUGFIX: message previously said "upload chunks to" - this
        # is the download path
        raise PermissionError(
            "Invalid chunked download secret. You do not have "
            "permission to download chunks from this file!")

    file_key = data["filekey"]
    chunk_index = int(chunk_index)

    file_bucket = self._get_file_bucket(file_key)
    data_key = "%s/data/%d" % (file_key, chunk_index)
    meta_key = "%s/meta/%d" % (file_key, chunk_index)

    num_chunks = None

    try:
        meta = _ObjectStore.get_object_from_json(file_bucket, meta_key)
    except Exception:
        meta = None

    if meta is None:
        # invalid read - see if the file has been closed?
        filename = data["filename"]
        version = data["version"]

        from Acquire.Storage import FileInfo as _FileInfo
        fileinfo = _FileInfo.load(drive=self,
                                  filename=filename,
                                  version=version)

        if fileinfo.version().is_uploading():
            # still uploading, so this chunk simply doesn't exist yet
            raise IndexError("Invalid chunk index")

        num_chunks = fileinfo.version().num_chunks()

        if chunk_index < 0:
            # support negative (from-the-end) chunk indexing
            chunk_index = num_chunks + chunk_index

        if chunk_index < 0 or chunk_index > num_chunks:
            raise IndexError("Invalid chunk index")
        elif chunk_index == num_chunks:
            # signal we've reached the end of the file
            return (None, None, num_chunks)

        # BUGFIX: the keys were computed before 'chunk_index' was
        # normalised above, so a negative index would have read
        # non-existent keys such as '.../data/-1' - recompute them
        data_key = "%s/data/%d" % (file_key, chunk_index)
        meta_key = "%s/meta/%d" % (file_key, chunk_index)

        # we should be able to read this metadata...
        meta = _ObjectStore.get_object_from_json(file_bucket, meta_key)

    chunk = _ObjectStore.get_object(file_bucket, data_key)

    return (chunk, meta, num_chunks)