def list_drives(self, drive_uid=None):
    """Return a list of all of the top-level drives to which this
       user has access, or all of the sub-drives of the drive
       with passed 'drive_uid'
    """
    if self.is_null():
        return []

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string as _encoded_to_string
    from Acquire.Storage import DriveMeta as _DriveMeta

    bucket = _get_service_account_bucket()

    if drive_uid is None:
        # look for the top-level drives
        names = _ObjectStore.get_all_object_names(
            bucket, "%s/%s" % (_drives_root, self._user_guid))
    else:
        # look for the sub-drives of 'drive_uid'
        names = _ObjectStore.get_all_object_names(
            bucket, "%s/%s/%s" % (_subdrives_root,
                                  self._user_guid, drive_uid))

    drives = []

    for name in names:
        drive_name = _encoded_to_string(name.split("/")[-1])
        drives.append(_DriveMeta(name=drive_name, container=drive_uid))

    return drives

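# --- illustrative sketch (not part of the original module) ---
# The drive name round-trips through the encoding used in the object key
# above. 'string_to_encoded' is assumed to be the encoding half of the
# pair exported by Acquire.ObjectStore; the root and GUID are
# illustrative placeholders.
def _example_drive_key_round_trip():
    from Acquire.ObjectStore import string_to_encoded, encoded_to_string

    drives_root = "storage/drives"   # illustrative stand-in for _drives_root
    user_guid = "a1b2c3d4"           # illustrative user GUID

    key = "%s/%s/%s" % (drives_root, user_guid,
                        string_to_encoded("my drive"))

    # list_drives recovers the human-readable name from the last component
    assert encoded_to_string(key.split("/")[-1]) == "my drive"
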
def contains(self, account, bucket=None):
    """Return whether or not this group contains the passed account

    Args:
        account (:obj:`Account`): Account to check against the group
        bucket (dict, default=None): Bucket to load data from

    Returns:
        bool: True if the account is in the group, else False
    """
    self._assert_is_readable()

    from Acquire.Accounting import Account as _Account

    if not isinstance(account, _Account):
        raise TypeError("The passed account must be of type Account")

    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    # read the UID of the account in this group that matches the
    # passed account's name
    try:
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        account_uid = _ObjectStore.get_string_object(
            bucket, self._account_key(account.name()))
    except:
        account_uid = None

    return account.uid() == account_uid

def register(par, url_checksum, details_function, cleanup_function=None):
    """Register the passed PAR, passing in the checksum of the PAR's
       secret URL (so we can verify the close), and optionally
       supplying a 'cleanup_function' that is called when the PAR
       is closed.

       The passed 'details_function' should be used to extract the
       object-store driver-specific details from the PAR and convert
       them into a dictionary. Its signature should be:

       driver_details = details_function(par)
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import OSPar as _OSPar
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Function as _Function
    from Acquire.ObjectStore import datetime_to_string \
        as _datetime_to_string

    if par is None:
        return

    if not isinstance(par, _OSPar):
        raise TypeError("You can only register PARs of type OSPar")

    if par.is_null():
        return

    data = {}
    data["par"] = par.to_data()

    if details_function is None:
        data["driver_details"] = par._driver_details
    else:
        data["driver_details"] = details_function(par)

    data["url_checksum"] = url_checksum

    if cleanup_function is not None:
        if not isinstance(cleanup_function, _Function):
            cleanup_function = _Function(cleanup_function)

        data["cleanup_function"] = cleanup_function.to_data()

    expire_string = _datetime_to_string(par.expires_when())

    # store the registration twice - once indexed by UID and once
    # indexed by expiry date, so that expired PARs can be found
    key = "%s/uid/%s/%s" % (_registry_key, par.uid(), expire_string)

    bucket = _get_service_account_bucket()
    _ObjectStore.set_object_from_json(bucket, key, data)

    key = "%s/expire/%s/%s" % (_registry_key, expire_string, par.uid())
    _ObjectStore.set_object_from_json(bucket, key, par.uid())

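# --- illustrative sketch (not part of the original module) ---
# A minimal conforming 'details_function' for register() above, assuming
# (as the default branch does) that the driver caches its details on the
# PAR's '_driver_details' attribute.
def _example_details_function(par):
    # must return a plain dictionary of driver-specific details
    return dict(par._driver_details)
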
def __init__(self, key=None, timeout=10, lease_time=10, bucket=None):
    """Create the mutex. This immediately tries to lock the mutex
       for key 'key' and will block until the lock is successfully
       obtained (or until 'timeout' seconds have passed, at which
       point an exception is raised). If no key is provided, then
       this is the (single) global mutex.

       Note that this is really a lease, as the mutex will only be
       held for a maximum of 'lease_time' seconds. After this time
       the mutex will be automatically unlocked and made available
       to lock by others. You can renew the lease by re-locking
       the mutex.
    """
    import uuid

    if key is None:
        key = "mutexes/none"
    else:
        key = "mutexes/%s" % str(key).replace(" ", "_")

    if bucket is None:
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        bucket = _get_service_account_bucket()

    self._bucket = bucket
    self._key = key
    self._secret = str(uuid.uuid4())
    self._is_locked = 0
    self.lock(timeout, lease_time)

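# --- illustrative sketch (not part of the original module) ---
# Usage of the mutex above, assuming this class is the Mutex exported
# from Acquire.ObjectStore (as the imports in
# refresh_service_keys_and_certs below suggest).
def _example_mutex_usage():
    from Acquire.ObjectStore import Mutex

    # block for up to 10 seconds to acquire a 10-second lease on this key
    m = Mutex(key="accounting/ledger", timeout=10, lease_time=10)

    try:
        pass  # ...work that must not run concurrently...
        # re-locking via m.lock(timeout, lease_time) renews the lease
    finally:
        m.unlock()
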
def get_cluster():
    """Return a handle to the single compute cluster that is
       connected to this compute service
    """
    if not Cluster._is_running_service():
        raise PermissionError(
            "You can only call 'get_cluster' on the compute service")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "compute/cluster"

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        from Acquire.Service import ServiceError
        raise ServiceError(
            "You have not set the cluster that will be used to actually "
            "run the compute jobs!")

    return Cluster.from_data(data)

def untrust_service(service):
    """Stop trusting the passed service. This will remove the service
       as being trusted. You must pass in a valid admin_user
       authorisation for this service
    """
    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        bucket = _get_service_account_bucket()
        urlkey = "_trusted/url/%s" % _url_to_encoded(service.canonical_url())
        uidkey = "_trusted/uid/%s" % service.uid()

        # delete the trusted service by both canonical_url and uid
        try:
            _ObjectStore.delete_object(bucket, uidkey)
        except:
            pass

        try:
            _ObjectStore.delete_object(bucket, urlkey)
        except:
            pass

        from Acquire.Service import clear_services_cache \
            as _clear_services_cache
        _clear_services_cache()
    else:
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        wallet.remove_service(service)

def get_service_user_account_uid(accounting_service_uid):
    """Return the UID of the financial Acquire.Accounting.Account
       that is held on the accounting service with UID
       'accounting_service_uid' for the service user on this service.
       This is the account to which payment for this service
       should be sent
    """
    assert_running_service()

    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    key = "%s/account/%s" % (_service_key, accounting_service_uid)

    try:
        account_uid = _ObjectStore.get_string_object(bucket, key)
    except:
        account_uid = None

    if account_uid is None:
        from Acquire.Service import ServiceAccountError
        raise ServiceAccountError(
            "This service does not have a valid financial account on "
            "the accounting service at '%s'" % accounting_service_uid)

    return account_uid

def load(self, par_uid, secret=None):
    """Load and return the PAR and identifiers associated with
       the passed UID, locked with the passed secret
    """
    # validate that the UID actually looks like a UID. This
    # should prevent attacks that try weird UIDs
    from Acquire.ObjectStore import validate_is_uid \
        as _validate_is_uid
    _validate_is_uid(par_uid)

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    try:
        key = "%s/%s" % (_par_root, par_uid)
        bucket = _get_service_account_bucket()
        data = _ObjectStore.get_object_from_json(bucket, key)

        from Acquire.Client import PAR as _PAR
        import json as _json

        par = _PAR.from_data(data["par"])
        identifiers = _json.loads(data["identifiers"])

        if secret != data["secret"]:
            raise PermissionError()
    except:
        raise PermissionError(
            "There is no valid PAR at ID '%s'" % par_uid)

    if par.expired():
        raise PermissionError(
            "There is no valid PAR at ID '%s' as it has expired" % par_uid)

    return (par, identifiers)

def close_downloader(self, downloader_uid, file_uid, secret):
    """Close the downloader associated with the passed downloader_uid
       and file_uid, authenticated using the passed secret
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                           file_uid, downloader_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        # the downloader has already been closed
        return

    shared_secret = data["secret"]

    if secret != shared_secret:
        raise PermissionError(
            "Invalid request - you do not have permission to "
            "close this downloader")

    try:
        _ObjectStore.take_object_from_json(bucket, key)
    except:
        pass

def get_pending_job_uids(self, passphrase=None):
    """Return the UIDs of all of the jobs that need to be submitted"""
    if self.is_null():
        return []

    if Cluster._is_running_service():
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        self.verify_passphrase(resource="get_pending_job_uids",
                               passphrase=passphrase)

        bucket = _get_service_account_bucket()
        prefix = "compute/pending/"

        uids = _ObjectStore.get_all_object_names(bucket=bucket,
                                                 prefix=prefix,
                                                 without_prefix=True)

        return uids
    else:
        passphrase = self.passphrase(resource="get_pending_job_uids")
        args = {"passphrase": passphrase}

        result = self.compute_service().call_function(
            function="get_pending_job_uids", args=args)

        return self.decrypt_data(result["job_uids"])

def save_transaction(record, bucket=None):
    """Save the passed transaction record to the object store

    Args:
        record (TransactionRecord): Record to save
        bucket (dict, default=None): Bucket to save data to

    Returns:
        None
    """
    from Acquire.Accounting import TransactionRecord as _TransactionRecord

    if not isinstance(record, _TransactionRecord):
        raise TypeError("You can only write TransactionRecord objects "
                        "to the ledger!")

    if not record.is_null():
        if bucket is None:
            from Acquire.Service import get_service_account_bucket \
                as _get_service_account_bucket
            bucket = _get_service_account_bucket()

        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        _ObjectStore.set_object_from_json(bucket,
                                          Ledger.get_key(record.uid()),
                                          record.to_data())

def load_transaction(uid, bucket=None):
    """Load the TransactionRecord with UID=uid from the ledger

    Args:
        uid (str): UID of the transaction to load
        bucket (dict, default=None): Bucket to load data from

    Returns:
        TransactionRecord: Transaction with that UID
    """
    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    from Acquire.Accounting import TransactionRecord as _TransactionRecord
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    data = _ObjectStore.get_object_from_json(bucket, Ledger.get_key(uid))

    if data is None:
        from Acquire.Accounting import LedgerError
        raise LedgerError("There is no transaction recorded in the "
                          "ledger with UID=%s (at key %s)" %
                          (uid, Ledger.get_key(uid)))

    return _TransactionRecord.from_data(data)

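# --- illustrative sketch (not part of the original module) ---
# A hedged round-trip of save_transaction/load_transaction above;
# 'record' is assumed to be a valid, non-null
# Acquire.Accounting.TransactionRecord.
def _example_ledger_round_trip(record):
    Ledger.save_transaction(record)

    # loading by UID returns an equivalent record, or raises LedgerError
    loaded = Ledger.load_transaction(record.uid())
    assert loaded.uid() == record.uid()
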
def submit_job(self, uid):
    """Submit the job with specified UID to this cluster.

       On the service this will put the UID of the job into the
       "pending" pool, and will signal the cluster to pull that job.

       On the client this will pull the job with that UID from the
       pending pool, moving it to the "submitting" pool, and will
       pass this job to the cluster submission system.
    """
    if Cluster._is_running_service():
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        from Acquire.ObjectStore import get_datetime_now_to_string \
            as _get_datetime_now_to_string

        bucket = _get_service_account_bucket()
        key = "compute/pending/%s" % uid

        resource = {"pending": _get_datetime_now_to_string(),
                    "uid": uid}

        _ObjectStore.set_object_from_json(bucket, key, resource)
    else:
        # fetch the pending job and change its status to "submitting"
        return self.get_job(uid=uid, start_state="pending",
                            end_state="submitting")

def save_service_keys_to_objstore(include_old_keys=False):
    """Call this function to ensure that the current set of keys
       used for this service are saved to the object store
    """
    service = get_this_service(need_private_access=True)
    oldkeys = service.dump_keys(include_old_keys=include_old_keys)

    # now write the old keys to storage
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/oldkeys/%s" % (_service_key, oldkeys["datetime"])
    _ObjectStore.set_object_from_json(bucket, key, oldkeys)

    # now write the pointers from fingerprint to file...
    for fingerprint in oldkeys.keys():
        if fingerprint not in ["datetime", "encrypted_passphrase"]:
            _ObjectStore.set_string_object(
                bucket, "%s/oldkeys/fingerprints/%s" %
                (_service_key, fingerprint), key)

def trust_service(service):
    """Trust the passed service. This will record this service as
       trusted, e.g. saving the keys and certificates for this
       service and allowing it to be used for the specified type.
    """
    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        bucket = _get_service_account_bucket()
        urlkey = "_trusted/url/%s" % _url_to_encoded(service.canonical_url())
        uidkey = "_trusted/uid/%s" % service.uid()
        service_data = service.to_data()

        # store the trusted service by both canonical_url and uid
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        _ObjectStore.set_object_from_json(bucket, uidkey, service_data)
        _ObjectStore.set_string_object(bucket, urlkey, uidkey)

        from Acquire.Service import clear_services_cache \
            as _clear_services_cache
        _clear_services_cache()
    else:
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        wallet.add_service(service)

def _validate_file_upload(par, file_bucket, file_key, objsize, checksum):
    """Call this function to signify that the file associated with
       this PAR has been uploaded. This will check that the objsize
       and checksum match with what was promised
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.Service import get_this_service as _get_this_service

    service = _get_this_service()

    bucket = _get_service_account_bucket()
    file_bucket = _ObjectStore.get_bucket(bucket=bucket,
                                          bucket_name=file_bucket,
                                          create_if_needed=True)

    # check that the file uploaded matches what was promised
    (real_objsize, real_checksum) = _ObjectStore.get_size_and_checksum(
        file_bucket, file_key)

    if real_objsize != objsize or real_checksum != checksum:
        # probably should delete the broken object here...
        from Acquire.Storage import FileValidationError
        raise FileValidationError(
            "The file uploaded does not match what was promised. "
            "size: %s versus %s, checksum: %s versus %s. Please try "
            "to upload the file again." %
            (real_objsize, objsize, real_checksum, checksum))

def close_par(par=None, par_uid=None, url_checksum=None):
    """Close the passed OSPar, which provides access to data in an
       object store bucket

    Args:
        par (OSPar, default=None): OSPar to close
        par_uid (str, default=None): UID of the OSPar
        url_checksum (str, default=None): Checksum to pass to the
            PAR registry

    Returns:
        None
    """
    from Acquire.ObjectStore import OSParRegistry as _OSParRegistry

    if par is None:
        par = _OSParRegistry.get(
            par_uid=par_uid,
            details_function=_get_driver_details_from_data,
            url_checksum=url_checksum)

    from Acquire.ObjectStore import OSPar as _OSPar

    if not isinstance(par, _OSPar):
        raise TypeError("The par must be of type OSPar")

    if par.driver() != "oci":
        raise ValueError("Cannot delete an OSPar that was not created "
                         "by the OCI object store")

    # delete the PAR
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    par_bucket = par.driver_details()["bucket"]
    par_id = par.driver_details()["par_id"]

    bucket = _get_service_account_bucket()

    # now get the bucket accessed by the OSPar...
    bucket = OCI_ObjectStore.get_bucket(bucket=bucket,
                                        bucket_name=par_bucket)

    client = bucket["client"]

    try:
        response = client.delete_preauthenticated_request(
            client.get_namespace().data,
            bucket["bucket_name"],
            par_id)
    except Exception as e:
        from Acquire.ObjectStore import ObjectStoreError
        raise ObjectStoreError(
            "Unable to delete the OSPar '%s' : Error %s" %
            (par_id, str(e)))

    if response.status not in [200, 204]:
        from Acquire.ObjectStore import ObjectStoreError
        raise ObjectStoreError(
            "Unable to delete the OSPar '%s' : Status %s, Error %s" %
            (par_id, response.status, str(response.data)))

    # close the OSPar - this will trigger any close_function(s)
    _OSParRegistry.close(par=par)

def get_bucket(self):
    """Return the bucket used by this object, fetching (and caching)
       the service account bucket if one has not yet been set
    """
    if self._bucket:
        return self._bucket

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    self._bucket = _get_service_account_bucket()
    return self._bucket

def close(par):
    """Close the passed PAR. This will remove the registration for
       the PAR and will also call the associated cleanup_function
       (if any)
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import OSPar as _OSPar
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import datetime_to_string \
        as _datetime_to_string
    from Acquire.ObjectStore import Function as _Function

    if par is None:
        return

    if not isinstance(par, _OSPar):
        raise TypeError("You can only close OSPar objects!")

    if par.is_null():
        return

    expire_string = _datetime_to_string(par.expires_when())

    bucket = _get_service_account_bucket()

    key = "%s/expire/%s/%s" % (_registry_key, expire_string, par.uid())

    try:
        _ObjectStore.delete_object(bucket=bucket, key=key)
    except:
        pass

    key = "%s/uid/%s/%s" % (_registry_key, par.uid(), expire_string)

    try:
        data = _ObjectStore.take_object_from_json(bucket=bucket, key=key)
    except:
        data = None

    if data is None:
        # this PAR has already been closed
        return

    if "cleanup_function" in data:
        cleanup_function = _Function.from_data(data["cleanup_function"])
        cleanup_function(par=par)

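# --- illustrative sketch (not part of the original module) ---
# A cleanup function suitable for register() above. close() deserialises
# it via Function.from_data and invokes it as cleanup_function(par=par),
# so it must accept 'par' as a keyword argument.
def _example_cleanup(par=None):
    # called once, when the registration is taken from the object store
    print("OSPar %s has been closed" % par.uid())
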
def save(self):
    """Save this ComputeJob to the object store"""
    if self.is_null():
        return

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "compute/job/%s" % self._uid
    _ObjectStore.set_object_from_json(bucket, key, self.to_data())

def close_uploader(self, file_uid, secret):
    """Close the uploader associated with the passed file_uid,
       authenticated using the passed secret
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        # the uploader has already been closed
        return

    shared_secret = data["secret"]

    if secret != shared_secret:
        raise PermissionError(
            "Invalid request - you do not have permission to "
            "close this uploader")

    try:
        data2 = _ObjectStore.take_object_from_json(bucket, key)
    except:
        data2 = None

    if data2 is None:
        # someone else is already in the process of closing
        # this uploader - let them do it!
        return

    filename = data["filename"]
    version = data["version"]

    # now get the FileInfo for this file
    from Acquire.Storage import FileInfo as _FileInfo
    fileinfo = _FileInfo.load(drive=self,
                              filename=filename,
                              version=version)

    file_key = data["filekey"]
    file_bucket = self._get_file_bucket(file_key)
    fileinfo.close_uploader(file_bucket=file_bucket)
    fileinfo.save()

def upload_chunk(self, file_uid, chunk_index, secret, chunk, checksum):
    """Upload a chunk of the file with UID 'file_uid'. This is the
       chunk at index 'chunk_index', whose content is set equal to
       'chunk' (validated with 'checksum'). The passed secret is used
       to authenticate this upload. The secret should be the
       multi_md5 hash of the shared secret with the concatenated
       drive_uid, file_uid and chunk_index
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)
    data = _ObjectStore.get_object_from_json(bucket, key)

    shared_secret = data["secret"]

    from Acquire.Crypto import Hash as _Hash
    shared_secret = _Hash.multi_md5(
        shared_secret,
        "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

    if secret != shared_secret:
        raise PermissionError(
            "Invalid chunked upload secret. You do not have permission "
            "to upload chunks to this file!")

    # validate the data checksum
    check = _Hash.md5(chunk)

    if check != checksum:
        from Acquire.Storage import FileValidationError
        raise FileValidationError(
            "Invalid checksum for chunk: %s versus %s" %
            (check, checksum))

    meta = {"filesize": len(chunk),
            "checksum": checksum,
            "compression": "bz2"}

    file_key = data["filekey"]
    chunk_index = int(chunk_index)
    file_bucket = self._get_file_bucket(file_key)
    data_key = "%s/data/%d" % (file_key, chunk_index)
    meta_key = "%s/meta/%d" % (file_key, chunk_index)

    _ObjectStore.set_object_from_json(file_bucket, meta_key, meta)
    _ObjectStore.set_object(file_bucket, data_key, chunk)

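# --- illustrative sketch (not part of the original module) ---
# Client-side derivation of the secret and checksum that upload_chunk
# expects, mirroring the server-side Hash calls above. The drive UID,
# file UID and shared secret are illustrative placeholders.
def _example_chunk_credentials():
    from Acquire.Crypto import Hash

    drive_uid = "d-1234"        # illustrative
    file_uid = "f-5678"         # illustrative
    shared_secret = "secret0"   # illustrative - agreed when upload opened

    chunk = b"example chunk data"
    chunk_index = 0

    # multi_md5 of the shared secret with the concatenated
    # drive_uid, file_uid and chunk_index
    secret = Hash.multi_md5(shared_secret,
                            "%s%s%d" % (drive_uid, file_uid, chunk_index))

    # the server recomputes this via Hash.md5(chunk)
    checksum = Hash.md5(chunk)

    return (secret, checksum, chunk)
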
def _refresh_this_service_keys_and_certs(service_info, service_password):
    """Internal function that checks whether the keys and certificates
       of this service need to be refreshed and, if so, rotates them,
       returning the (possibly updated) service data
    """
    from Acquire.Service import Service as _Service
    service = _Service.from_data(service_info, service_password)

    if service._uid == "STAGE1":
        return service_info

    if not service.should_refresh_keys():
        return service_info

    oldkeys = service.dump_keys(include_old_keys=False)

    # now write the old keys to storage
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/oldkeys/%s" % (_service_key, oldkeys["datetime"])
    _ObjectStore.set_object_from_json(bucket, key, oldkeys)

    # now write the pointers from fingerprint to file...
    for fingerprint in oldkeys.keys():
        if fingerprint not in ["datetime", "encrypted_passphrase"]:
            _ObjectStore.set_string_object(
                bucket, "%s/oldkeys/fingerprints/%s" %
                (_service_key, fingerprint), key)

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    m = _Mutex(key=service.uid(), bucket=bucket)

    service_data = _ObjectStore.get_object_from_json(bucket, _service_key)
    service_info = _Service.from_data(service_data)

    if service_info.last_key_update() == last_update:
        # no-one else has beaten us - write the updated keys
        # to global state
        _ObjectStore.set_object_from_json(bucket, _service_key,
                                          service.to_data(service_password))

    m.unlock()

    return service_data

def register(self, par, authorisation, secret=None):
    """Register the passed par, which is authorised using the passed
       authorisation. If the authorisation is correct then this will
       return the UID of the registered PAR
    """
    from Acquire.Client import PAR as _PAR
    from Acquire.Client import Authorisation as _Authorisation

    if not isinstance(par, _PAR):
        raise TypeError("The par must be type PAR")

    # create a new UID for this PAR
    from Acquire.ObjectStore import create_uid as _create_uid
    uid = _create_uid()
    par._set_uid(uid)

    if par.expired():
        raise PermissionError("The passed PAR has already expired!")

    if not isinstance(authorisation, _Authorisation):
        raise TypeError("The authorisation must be type Authorisation")

    identifiers = authorisation.verify(
        resource="create_par %s" % par.fingerprint(),
        return_identifiers=True)

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    if secret is not None and len(secret) > 0:
        from Acquire.Crypto import Hash
        secret = Hash.multi_md5(uid, secret)
    else:
        secret = None

    import json as _json

    data = {"par": par.to_data(),
            "identifiers": _json.dumps(identifiers),
            "secret": secret}

    key = "%s/%s" % (_par_root, uid)

    bucket = _get_service_account_bucket()
    _ObjectStore.set_object_from_json(bucket, key, data)

    return uid

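# --- illustrative sketch (not part of the original module) ---
# register() stores multi_md5(uid, secret) rather than the raw secret,
# so a later call to load() (shown earlier) must be given the same
# derived value. 'registry', 'par_uid' and 'user_secret' are
# illustrative names.
def _example_load_with_secret(registry, par_uid, user_secret):
    from Acquire.Crypto import Hash

    # derive the value that register() stored for this PAR
    derived = Hash.multi_md5(par_uid, user_secret)

    # load() compares its 'secret' argument against the stored value
    return registry.load(par_uid=par_uid, secret=derived)
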
def save(self):
    """Save the metadata about this drive to the object store"""
    if self.is_null():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    drive_key = self._drive_key()
    data = self.to_data()
    _ObjectStore.set_object_from_json(bucket, drive_key, data)

def get_trusted_services():
    """Return a dictionary of all trusted services, indexed by
       their type
    """
    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_this_service as _get_this_service
        from Acquire.Service import Service as _Service
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        # we already trust ourselves
        service = _get_this_service()

        trusted_services = {}
        trusted_services[service.service_type()] = [service]

        bucket = _get_service_account_bucket()
        uidkey = "_trusted/uid/"
        datas = _ObjectStore.get_all_objects(bucket, uidkey)

        for data in datas:
            remote_service = _Service.from_data(data)

            if remote_service.should_refresh_keys():
                # need to update the keys in our copy of the service
                remote_service.refresh_keys()
                key = "%s/%s" % (uidkey, remote_service.uid())
                _ObjectStore.set_object_from_json(
                    bucket, key, remote_service.to_data())

            if remote_service.service_type() in trusted_services:
                trusted_services[remote_service.service_type()].append(
                    remote_service)
            else:
                trusted_services[remote_service.service_type()] = \
                    [remote_service]

        return trusted_services
    else:
        # this is running on the client
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        return wallet.get_services()

def login(credentials, user_uid=None, remember_device=False):
    """Login to the session using the passed 'credentials',
       optionally specifying the 'user_uid' of the user
    """
    if user_uid is None:
        # find all of the user_uids of accounts with this
        # username+password combination
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        from Acquire.Client import Credentials as _Credentials
        from Acquire.Identity import UserCredentials as _UserCredentials
        from Acquire.Service import get_this_service as _get_this_service

        if not isinstance(credentials, _Credentials):
            raise TypeError("The credentials must be type Credentials")

        bucket = _get_service_account_bucket()

        encoded_password = _UserCredentials.hash(
            username=credentials.username(),
            password=credentials.password())

        prefix = "%s/passwords/%s/" % (_user_root, encoded_password)

        try:
            names = _ObjectStore.get_all_object_names(bucket=bucket,
                                                      prefix=prefix)
        except:
            names = []

        user_uids = []
        for name in names:
            user_uids.append(name.split("/")[-1])
    else:
        user_uids = [user_uid]

    if len(user_uids) == 0:
        from Acquire.Identity import UserValidationError
        raise UserValidationError(
            "No user with name '%s'" % credentials.username())

    from Acquire.Identity import UserCredentials as _UserCredentials
    return _UserCredentials.login(credentials=credentials,
                                  user_uids=user_uids,
                                  remember_device=remember_device)

def save(self):
    """Save the current state of this LoginSession to the
       object store
    """
    if self.is_null():
        return

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = self._get_key()
    _ObjectStore.set_object_from_json(bucket=bucket, key=key,
                                      data=self.to_data())

def refresh_service_keys_and_certs(service, force_refresh=False):
    """This function will check if any key rotation is needed, and
       if so, it will automatically refresh the keys and certificates.
       The old keys and certificates will be stored in a database of
       old keys and certificates
    """
    assert_running_service()

    if service._uid == "STAGE1":
        return service

    if (not force_refresh) and (not service.should_refresh_keys()):
        return service

    # ensure that the current keys are saved to the object store
    save_service_keys_to_objstore()

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.Service import Service as _Service
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    m = _Mutex(key=service.uid(), bucket=bucket)

    service_data = _ObjectStore.get_object_from_json(bucket, _service_key)
    service_info = _Service.from_data(service_data)

    if service_info.last_key_update() == last_update:
        # no-one else has beaten us - write the updated keys
        # to global state
        _ObjectStore.set_object_from_json(
            bucket, _service_key,
            service.to_data(_get_service_password()))

    m.unlock()

    # clear the cache as we will need to load a new object
    clear_serviceinfo_cache()

    return get_this_service(need_private_access=True)

def load(uid):
    """Load the ComputeJob associated with the specified uid"""
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "compute/job/%s" % uid

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        raise KeyError("There is no job with UID = %s" % uid)

    return ComputeJob.from_data(data)

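# --- illustrative sketch (not part of the original module) ---
# Round trip of ComputeJob.save()/load(); 'job' is assumed to be a
# valid, non-null ComputeJob, and the uid() accessor is an assumption -
# only the '_uid' attribute appears in this excerpt.
def _example_job_round_trip(job):
    job.save()
    loaded = ComputeJob.load(job.uid())
    assert loaded.to_data() == job.to_data()
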