def get_object(bucket, key):
    """Return the object stored under 'key' in the passed bucket.

    This is a thin wrapper around the Acquire ObjectStore.get_object
    function.

    Args:
        bucket (dict): Bucket containing the data
        key (str): Key under which the data is stored

    Returns:
        Object: The object retrieved from the store
    """
    return ObjectStore.get_object(bucket, key)
def download(self, filename, authorisation, version=None,
             encrypt_key=None, force_par=False, must_chunk=False,
             par=None, identifiers=None):
    """Download the file called 'filename' from this drive.

    This returns the FileMeta describing the file plus exactly one
    non-None transfer mechanism, chosen by how the file is stored and
    how large it is:

      * a ChunkDownloader, if the file was uploaded in chunks
      * an OSPar (pre-authenticated request), if 'force_par' is True
        or the file is larger than 1 MiB - the OSPar is encrypted
        with 'encrypt_key'; remember to close it once you have
        finished downloading the file
      * the raw file data, for small (<= 1 MiB) files

    Args:
        filename (str): Name of the file to download
        authorisation (Authorisation): Authorisation for this request
        version (str, optional): Version of the file to download, or
            None for the latest version
        encrypt_key (PublicKey): Key used to encrypt the OSPar
        force_par (bool): If True, always download via an OSPar
        must_chunk (bool): If True, only allow a chunked download
        par (OSPar, optional): PAR authorising this request
        identifiers (dict, optional): Identifiers of the requesting
            user

    Returns:
        tuple: (filemeta, filedata, ospar, downloader) - exactly one
        of filedata, ospar and downloader is not None

    Raises:
        TypeError: if 'encrypt_key' is not a PublicKey
        PermissionError: if the user lacks read permission for the
            file, or the file cannot be downloaded as requested
    """
    from Acquire.Storage import FileInfo as _FileInfo
    from Acquire.Crypto import PublicKey as _PublicKey
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    if not isinstance(encrypt_key, _PublicKey):
        raise TypeError("The encryption key must be of type PublicKey")

    (drive_acl, identifiers) = self._resolve_acl(
        authorisation=authorisation,
        resource="download %s %s" % (self._drive_uid, filename),
        par=par, identifiers=identifiers)

    # even if the drive_acl is not readable by this user, they
    # may have read permission for the file...

    # now get the FileInfo for this FileHandle
    fileinfo = _FileInfo.load(drive=self,
                              filename=filename,
                              version=version,
                              identifiers=identifiers,
                              upstream=drive_acl)

    # resolve the ACL for the file from this FileHandle
    filemeta = fileinfo.get_filemeta()
    file_acl = filemeta.acl()

    if not file_acl.is_readable():
        raise PermissionError(
            "You do not have read permissions for the file. Your file "
            "permissions are %s" % str(file_acl))

    file_bucket = self._get_file_bucket()
    file_key = fileinfo.version()._file_key()

    filedata = None
    downloader = None
    ospar = None

    if fileinfo.version().is_chunked():
        # this is a chunked file. We need to return a
        # ChunkDownloader to download the file
        from Acquire.Client import ChunkDownloader as _ChunkDownloader
        downloader = _ChunkDownloader(
            drive_uid=self._drive_uid,
            file_uid=fileinfo.version().uid())

        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        # register the downloader's shared secret so that later
        # 'download_chunk' calls can authenticate against it
        bucket = _get_service_account_bucket()

        key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                               filemeta.uid(), downloader.uid())

        data = {"filename": filename,
                "version": filemeta.uid(),
                "filekey": fileinfo.version()._file_key(),
                "secret": downloader.secret()}

        _ObjectStore.set_object_from_json(bucket, key, data)
    elif must_chunk:
        raise PermissionError(
            "Cannot download this file in a chunked manner!")
    elif force_par or fileinfo.filesize() > 1048576:
        # the file is too large to include in the download so
        # we need to use a OSPar to download
        ospar = _ObjectStore.create_par(bucket=file_bucket,
                                        encrypt_key=encrypt_key,
                                        key=file_key,
                                        readable=True,
                                        writeable=False)
    else:
        # one-trip download of files that are less than 1 MB
        filedata = _ObjectStore.get_object(file_bucket, file_key)

    # return the filemeta, and either the filedata, ospar or downloader
    return (filemeta, filedata, ospar, downloader)
def download_chunk(self, file_uid, downloader_uid, chunk_index, secret):
    """Download a chunk of the file with UID 'file_uid' at chunk
    index 'chunk_index'. This request is authenticated with the
    passed secret. The secret should be the multi_md5 hash of the
    shared secret with the concatenated drive_uid, file_uid and
    chunk_index.

    Args:
        file_uid (str): UID of the file being downloaded
        downloader_uid (str): UID of the registered ChunkDownloader
        chunk_index (int): Index of the chunk to download - negative
            indices count back from the end of the file
        secret (str): Authenticating secret for this chunk

    Returns:
        tuple: (chunk, meta, num_chunks) - chunk and meta are None
        when the requested index equals the number of chunks,
        signalling the end of the file

    Raises:
        PermissionError: if there is no registered downloader or the
            passed secret is invalid
        IndexError: if the chunk index is out of range or the file is
            still uploading
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    # the secret hash below formats the index with %d, so convert
    # now - the index may arrive as a string from the web request
    chunk_index = int(chunk_index)

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                           file_uid, downloader_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except Exception:
        data = None

    if data is None:
        raise PermissionError(
            "There is no downloader available to let you download "
            "this chunked file!")

    shared_secret = data["secret"]

    from Acquire.Crypto import Hash as _Hash
    shared_secret = _Hash.multi_md5(
        shared_secret,
        "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

    if secret != shared_secret:
        raise PermissionError(
            "Invalid chunked download secret. You do not have "
            "permission to download chunks of this file!")

    file_key = data["filekey"]
    file_bucket = self._get_file_bucket(file_key)

    data_key = "%s/data/%d" % (file_key, chunk_index)
    meta_key = "%s/meta/%d" % (file_key, chunk_index)
    num_chunks = None

    try:
        meta = _ObjectStore.get_object_from_json(file_bucket, meta_key)
    except Exception:
        meta = None

    if meta is None:
        # invalid read - see if the file has been closed?
        filename = data["filename"]
        version = data["version"]

        from Acquire.Storage import FileInfo as _FileInfo
        fileinfo = _FileInfo.load(drive=self,
                                  filename=filename,
                                  version=version)

        if fileinfo.version().is_uploading():
            raise IndexError("Invalid chunk index")

        num_chunks = fileinfo.version().num_chunks()

        if chunk_index < 0:
            # negative indices count back from the end of the file
            chunk_index = num_chunks + chunk_index

        if chunk_index < 0 or chunk_index > num_chunks:
            raise IndexError("Invalid chunk index")
        elif chunk_index == num_chunks:
            # signal we've reached the end of the file
            return (None, None, num_chunks)

        # recompute the keys, as the index may have been normalised
        # from a negative value above (previously the stale keys
        # built from the negative index were reused here)
        data_key = "%s/data/%d" % (file_key, chunk_index)
        meta_key = "%s/meta/%d" % (file_key, chunk_index)

        # we should be able to read this metadata...
        meta = _ObjectStore.get_object_from_json(file_bucket, meta_key)

    chunk = _ObjectStore.get_object(file_bucket, data_key)

    return (chunk, meta, num_chunks)