def register(par, url_checksum, details_function, cleanup_function=None):
    """Register the passed PAR, passing in the checksum of the PAR's
       secret URL (so we can verify the close), and optionally
       supplying a cleanup_function that is called when the PAR is
       closed. The passed 'details_function' should be used to extract
       the object-store driver-specific details from the PAR and
       convert them into a dictionary. The signature should be;

       driver_details = details_function(par)
    """
    from Acquire.Service import is_running_service as _is_running_service

    # registration only happens on the service side
    if not _is_running_service():
        return

    if par is None:
        return

    from Acquire.ObjectStore import OSPar as _OSPar

    if not isinstance(par, _OSPar):
        raise TypeError("You can only register pars of type PAR")

    if par.is_null():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Function as _Function
    from Acquire.ObjectStore import datetime_to_string \
        as _datetime_to_string

    entry = {}
    entry["par"] = par.to_data()

    # driver-specific details either come from the supplied extractor
    # or directly from the PAR itself
    if details_function is None:
        entry["driver_details"] = par._driver_details
    else:
        entry["driver_details"] = details_function(par)

    entry["url_checksum"] = url_checksum

    if cleanup_function is not None:
        if not isinstance(cleanup_function, _Function):
            cleanup_function = _Function(cleanup_function)

        entry["cleanup_function"] = cleanup_function.to_data()

    expires = _datetime_to_string(par.expires_when())
    bucket = _get_service_account_bucket()

    # index the full record by uid...
    uid_key = "%s/uid/%s/%s" % (_registry_key, par.uid(), expires)
    _ObjectStore.set_object_from_json(bucket, uid_key, entry)

    # ...and the uid by expiry date, so expired PARs can be swept
    expire_key = "%s/expire/%s/%s" % (_registry_key, expires, par.uid())
    _ObjectStore.set_object_from_json(bucket, expire_key, par.uid())
def list_drives(self, drive_uid=None):
    """Return a list of all of the top-level drives to which this
       user has access, or all of the sub-drives of the drive with
       passed 'drive_uid'
    """
    if self.is_null():
        return []

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string as _encoded_to_string
    from Acquire.Storage import DriveMeta as _DriveMeta

    # choose where to look: the top-level drives, or the sub-drives
    # of the requested drive
    if drive_uid is None:
        prefix = "%s/%s" % (_drives_root, self._user_guid)
    else:
        prefix = "%s/%s/%s" % (_subdrives_root, self._user_guid, drive_uid)

    bucket = _get_service_account_bucket()
    names = _ObjectStore.get_all_object_names(bucket, prefix)

    # the drive name is encoded as the last component of the key
    return [_DriveMeta(name=_encoded_to_string(name.split("/")[-1]),
                       container=drive_uid)
            for name in names]
def untrust_service(service):
    """Stop trusting the passed service. This will remove the service
       as being trusted. You must pass in a valid admin_user
       authorisation for this service
    """
    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        # BUG FIX: _ObjectStore was never imported here - the resulting
        # NameError was silently swallowed by the bare excepts below, so
        # the trusted-service records were never actually deleted
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        bucket = _get_service_account_bucket()
        urlkey = "_trusted/url/%s" % _url_to_encoded(service.canonical_url())
        uidkey = "_trusted/uid/%s" % service.uid()

        # delete the trusted service by both canonical_url and uid -
        # best effort, as the keys may already be gone
        try:
            _ObjectStore.delete_object(bucket, uidkey)
        except:
            pass

        try:
            _ObjectStore.delete_object(bucket, urlkey)
        except:
            pass

        from Acquire.Service import clear_services_cache \
            as _clear_services_cache
        _clear_services_cache()
    else:
        # on the client the wallet holds the trusted services
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        wallet.remove_service(service)
def save_transaction(record, bucket=None):
    """Save the passed transaction record to the object store

    Args:
        record (TransactionRecord): To save
        bucket (dict, default=None): Bucket to save data from
    Returns:
        None
    """
    from Acquire.Accounting import TransactionRecord as _TransactionRecord

    if not isinstance(record, _TransactionRecord):
        raise TypeError("You can only write TransactionRecord objects "
                        "to the ledger!")

    # null records are silently ignored
    if record.is_null():
        return

    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    _ObjectStore.set_object_from_json(bucket,
                                      Ledger.get_key(record.uid()),
                                      record.to_data())
def save_service_keys_to_objstore(include_old_keys=False):
    """Call this function to ensure that the current set of keys
       used for this service are saved to object store
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    service = get_this_service(need_private_access=True)
    dumped = service.dump_keys(include_old_keys=include_old_keys)

    bucket = _get_service_account_bucket()
    key = "%s/oldkeys/%s" % (_service_key, dumped["datetime"])

    # write the full key dump, timestamped...
    _ObjectStore.set_object_from_json(bucket, key, dumped)

    # ...then a pointer from each key fingerprint back to that dump
    for fingerprint in dumped:
        if fingerprint in ("datetime", "encrypted_passphrase"):
            continue

        _ObjectStore.set_string_object(
            bucket,
            "%s/oldkeys/fingerprints/%s" % (_service_key, fingerprint),
            key)
def fully_unlock(self):
    """This fully unlocks the mutex, removing all levels of recursion

    Returns:
         None
    """
    # already unlocked - nothing to do
    if self._is_locked == 0:
        return

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import get_datetime_now as _get_datetime_now

    # read back the current holder of the lock key; a missing/unreadable
    # key is treated as "someone else holds (or nobody holds) the lock"
    try:
        holder = _ObjectStore.get_string_object(self._bucket, self._key)
    except:
        holder = None

    if holder == self._lockstring:
        # we hold the mutex - delete the key
        _ObjectStore.delete_object(self._bucket, self._key)

    # reset local lock state regardless of who held the key
    self._lockstring = None
    self._is_locked = 0

    # if the lease ran out before we unlocked, another process may have
    # taken the mutex in the meantime - report this as a timeout error
    if self._end_lease < _get_datetime_now():
        self._end_lease = None
        from Acquire.ObjectStore import MutexTimeoutError
        raise MutexTimeoutError("The lease on this mutex expired before "
                                "this mutex was unlocked!")
    else:
        self._end_lease = None
def _record_daily_balance(self, balance, liability, receivable,
                          datetime=None, bucket=None):
    """Record the starting balance for the day containing 'datetime'
       as 'balance' with the starting outstanding liabilities at
       'liability' and starting outstanding accounts receivable at
       'receivable'

       If 'datetime' is none, then the balance for today is set.
    """
    if self.is_null():
        return

    # default to "now" so that today's balance is recorded
    if datetime is None:
        datetime = _datetime.datetime.now()

    balance_key = self._get_balance_key(datetime)

    if bucket is None:
        bucket = _login_to_service_account()

    # decimals are stored as strings to avoid JSON float rounding
    entry = {"balance": str(_create_decimal(balance)),
             "liability": str(_create_decimal(liability)),
             "receivable": str(_create_decimal(receivable))}

    _ObjectStore.set_object_from_json(bucket, balance_key, entry)
def _delete_note(self, note, bucket=None):
    """Internal function called to delete the passed note from the
       record. This is unsafe and should only be called from
       DebitNote.return_value or CreditNote.return_value (which
       themselves are only called from Ledger)
    """
    if note is None:
        return

    # anything other than a debit or credit note is silently ignored
    if not (isinstance(note, _DebitNote) or isinstance(note, _CreditNote)):
        return

    item_key = "%s/%s" % (self._key(), note.uid())

    if bucket is None:
        bucket = _login_to_service_account()

    # remove the note (best effort)
    try:
        _ObjectStore.delete_object(bucket, item_key)
    except:
        pass

    # now remove all day-balances from the day before this note
    # to today. Hopefully this will prevent any ledger errors...
    first_day = _datetime.datetime.fromtimestamp(
        note.timestamp()).toordinal() - 1
    last_day = _datetime.datetime.now().toordinal()

    for day in range(first_day, last_day + 1):
        balance_key = self._get_balance_key(
            _datetime.datetime.fromordinal(day))

        try:
            _ObjectStore.delete_object(bucket, balance_key)
        except:
            pass
def close_downloader(self, downloader_uid, file_uid, secret):
    """Close the downloader associated with the passed
       downloader_uid and file_uid, authenticated using the
       passed secret
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid,
                           file_uid, downloader_uid)

    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        # the downloader has already been closed
        return

    # authenticate against the secret stored at registration time
    if secret != data["secret"]:
        raise PermissionError(
            "Invalid request - you do not have permission to "
            "close this downloader")

    # remove the registration - best effort, as another caller may
    # have removed it already
    try:
        _ObjectStore.take_object_from_json(bucket, key)
    except:
        pass
def set_trusted_service_info(service_url, service):
    """Set the trusted service info for 'service_url' to 'service'"""
    bucket = _login_to_service_account()

    # trusted services are stored under their URL-encoded address
    key = "services/%s" % url_to_encoded(service_url)
    _ObjectStore.set_object_from_json(bucket, key, service.to_data())
def _validate_file_upload(par, file_bucket, file_key, objsize, checksum):
    """Call this function to signify that the file associated with
       this PAR has been uploaded. This will check that the
       objsize and checksum match with what was promised
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.Service import get_this_service as _get_this_service

    # NOTE(review): 'service' is not used below - presumably
    # _get_this_service() is called for its side effects (or this is
    # a leftover); confirm before removing
    service = _get_this_service()
    bucket = _get_service_account_bucket()

    # 'file_bucket' arrives as a bucket *name* and is resolved into an
    # actual bucket handle here
    file_bucket = _ObjectStore.get_bucket(bucket=bucket,
                                          bucket_name=file_bucket,
                                          create_if_needed=True)

    # check that the file uploaded matches what was promised
    (real_objsize, real_checksum) = _ObjectStore.get_size_and_checksum(
        file_bucket, file_key)

    if real_objsize != objsize or real_checksum != checksum:
        # probably should delete the broken object here...
        from Acquire.Storage import FileValidationError
        raise FileValidationError(
            "The file uploaded does not match what was promised. "
            "size: %s versus %s, checksum: %s versus %s. Please try "
            "to upload the file again." %
            (real_objsize, objsize, real_checksum, checksum))
def submit_job(self, uid):
    """Submit the job with specified UID to this cluster. On the
       service this will put the UID of the job into the "pending"
       pool, and will signal the cluster to pull that job

       On the client this will pull the job with that UID from the
       pending pool, moving it to the "submitting" pool and will
       pass this job to the cluster submission system
    """
    if not Cluster._is_running_service():
        # client side: fetch the pending job and change the status
        # to "submitting"
        return self.get_job(uid=uid, start_state="pending",
                            end_state="submitting")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import get_datetime_now_to_string \
        as _get_datetime_now_to_string

    # service side: drop the job UID into the pending pool,
    # timestamped with the submission time
    bucket = _get_service_account_bucket()
    resource = {"pending": _get_datetime_now_to_string(), "uid": uid}
    _ObjectStore.set_object_from_json(bucket,
                                      "compute/pending/%s" % uid,
                                      resource)
def trust_service(service):
    """Trust the passed service. This will record this service as
       trusted, e.g. saving the keys and certificates for this
       service and allowing it to be used for the specified type.
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        # on the client the wallet records trusted services
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        wallet.add_service(service)
        return

    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.ObjectStore import url_to_encoded as \
        _url_to_encoded
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()

    urlkey = "_trusted/url/%s" % _url_to_encoded(service.canonical_url())
    uidkey = "_trusted/uid/%s" % service.uid()

    # store the full record under the uid key, and a pointer to that
    # record under the canonical-url key
    _ObjectStore.set_object_from_json(bucket, uidkey, service.to_data())
    _ObjectStore.set_string_object(bucket, urlkey, uidkey)

    from Acquire.Service import clear_services_cache \
        as _clear_services_cache
    _clear_services_cache()
def test_objstore(bucket):
    """Round-trip unicode strings and a JSON object through the
       object store, then verify the key listing is complete"""
    stored = {}

    message = "ƒƒƒ Hello World ∂∂∂"
    ObjectStore.set_string_object(bucket, "test", message)
    stored["test"] = message
    assert message == ObjectStore.get_string_object(bucket, "test")

    message = "€€#¢∞ Hello ˚ƒ´πµçµΩ"
    ObjectStore.set_string_object(bucket, "test/something", message)
    stored["test/something"] = message
    assert message == ObjectStore.get_string_object(bucket,
                                                    "test/something")

    data = {"cat": "mieow",
            "dog": "woof",
            "sounds": [1, 2, 3, 4, 5],
            "flag": True}

    ObjectStore.set_object_from_json(bucket, "test/object", data)
    stored["test/object"] = data
    assert data == ObjectStore.get_object_from_json(bucket, "test/object")

    # every key we wrote - and nothing else - should be listed
    names = ObjectStore.get_all_object_names(bucket)

    assert len(names) == len(stored)

    for name in names:
        assert name in stored
def remove_trusted_service_info(service_url):
    """Remove the passed 'service_url' from the list of trusted services"""
    bucket = _login_to_service_account()
    key = "services/%s" % url_to_encoded(service_url)

    # best effort - a missing key means the service was not trusted
    try:
        _ObjectStore.delete_object(bucket, key)
    except:
        pass
def _generate_service_uid(bucket, registry_uid):
    """Function to generate a new service_uid on this registry.

       The UIDs have the form a0-a0, when "a" is any letter from
       [a-zA-Z] and "0" is any number from [0-9]. This give 520
       possible values for each part either side of the hyphen.

       The part on the left of the hypen is the root UID, which
       matches the root of the service_uid of the registry service
       that registered this service (the service_uid of a registry
       service has the UID root-root).

       If more than 520 values are needed, then either side of the
       ID can be extended by additional pairs of a0 digits, using a
       "." to separate pairs, e.g. the service_uid for registry b4-b4
       that comes after b4-Z9.Z9.Z9 is b4-a0.a0.a0.a0

       similarly, the registry after Z9 is A0-A0. This means that
       a0.a0-a0.a0.a0.a0 would be a perfectly valid ID. We would only
       need IDs of this length if we have ~270k registry services,
       and this service_uid came from a service that had registered
       ~73 trillion services...

       The registry root Z9, with registry Z9-Z9 is reserved for
       the temporary registry created during testing
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex

    root = registry_uid.split("-")[0]

    key = "%s/last_service_uid" % _registry_key

    # the Mutex serialises access to the counter across processes
    mutex = _Mutex(key=key)

    # FIX: release the mutex even if the read/increment/write fails -
    # previously a failure here left the key locked until its lease
    # expired
    try:
        try:
            last_vals = _ObjectStore.get_object_from_json(bucket=bucket,
                                                          key=key)
            last_vals = _inc_uid(last_vals)
        except:
            # no counter yet - start from the beginning
            last_vals = [0, 0]

        service_uid = "%s-%s" % (root, _to_uid(last_vals))

        # never hand out the registry's own UID
        while service_uid == registry_uid:
            last_vals = _inc_uid(last_vals)
            service_uid = "%s-%s" % (root, _to_uid(last_vals))

        _ObjectStore.set_object_from_json(bucket=bucket, key=key,
                                          data=last_vals)
    finally:
        mutex.unlock()

    return service_uid
def create_service_user_account(service, accounting_service_url):
    """Call this function to create the financial service account
       for this service on the accounting service at
       'accounting_service_url'

       This does nothing if the account already exists
    """
    assert_running_service()

    accounting_service = service.get_trusted_service(
        service_url=accounting_service_url)

    accounting_service_uid = accounting_service.uid()

    key = "%s/account/%s" % (_service_key, accounting_service_uid)
    bucket = service.bucket()

    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    try:
        account_uid = _ObjectStore.get_string_object(bucket, key)
    except:
        account_uid = None

    if account_uid:
        # we already have an account...
        return

    service_user = service.login_service_user()

    try:
        from Acquire.Client import create_account as _create_account
        from Acquire.Client import deposit as _deposit

        account = _create_account(
            service_user, "main",
            "Main account to receive payment for all use on service "
            "%s (%s)" % (service.canonical_url(), service.uid()),
            accounting_service=accounting_service)

        _deposit(user=service_user, value=100.0,
                 account_name="main",
                 accounting_service=accounting_service)

        account_uid = account.uid()

        # record the account uid so we don't create it again
        _ObjectStore.set_string_object(bucket, key, account_uid)
    except Exception as e:
        from Acquire.Service import exception_to_string
        from Acquire.Service import ServiceAccountError

        # FIX: chain the original exception (PEP 3134) so the full
        # underlying traceback is preserved as __cause__
        raise ServiceAccountError(
            "Unable to create a financial account for the service "
            "principal for '%s' on accounting service '%s'\n\nERROR\n%s"
            % (str(service), str(accounting_service),
               exception_to_string(e))) from e
def close(par):
    """Close the passed PAR. This will remove the registration
       for the PAR and will also call the associated
       cleanup_function (if any)
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        return

    if par is None:
        return

    from Acquire.ObjectStore import OSPar as _OSPar

    if not isinstance(par, _OSPar):
        raise TypeError("You can only close OSPar objects!")

    if par.is_null():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import datetime_to_string \
        as _datetime_to_string
    from Acquire.ObjectStore import Function as _Function

    expires = _datetime_to_string(par.expires_when())
    bucket = _get_service_account_bucket()

    # remove the expiry-date index entry (best effort)
    expire_key = "%s/expire/%s/%s" % (_registry_key, expires, par.uid())

    try:
        _ObjectStore.delete_object(bucket=bucket, key=expire_key)
    except:
        pass

    # take (get-and-delete) the main registration record; whoever
    # takes it is responsible for cleanup
    uid_key = "%s/uid/%s/%s" % (_registry_key, par.uid(), expires)

    try:
        data = _ObjectStore.take_object_from_json(bucket=bucket,
                                                  key=uid_key)
    except:
        data = None

    if data is None:
        # this PAR has already been closed
        return

    if "cleanup_function" in data:
        cleanup_function = _Function.from_data(data["cleanup_function"])
        cleanup_function(par=par)
def save(self):
    """Save this ComputeJob to the objectstore"""
    if self.is_null():
        return

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    # jobs are stored under their UID
    _ObjectStore.set_object_from_json(_get_service_account_bucket(),
                                      "compute/job/%s" % self._uid,
                                      self.to_data())
def save_transaction(record, bucket=None):
    """Save the passed transactionrecord to the object store"""
    if not isinstance(record, _TransactionRecord):
        raise TypeError("You can only write TransactionRecord objects "
                        "to the ledger!")

    # null records are silently ignored
    if record.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    _ObjectStore.set_object_from_json(bucket,
                                      Ledger.get_key(record.uid()),
                                      record.to_data())
def close_uploader(self, file_uid, secret):
    """Close the uploader associated with the passed file_uid,
       authenticated using the passed secret
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)

    # first a plain read - this lets us authenticate before we
    # destructively remove the registration
    try:
        data = _ObjectStore.get_object_from_json(bucket, key)
    except:
        data = None

    if data is None:
        # the uploader has already been closed
        return

    shared_secret = data["secret"]

    if secret != shared_secret:
        raise PermissionError(
            "Invalid request - you do not have permission to "
            "close this uploader")

    # now take (get-and-delete) the record - this acts as the claim
    # that we, and not a concurrent caller, perform the close
    try:
        data2 = _ObjectStore.take_object_from_json(bucket, key)
    except:
        data2 = None

    if data2 is None:
        # someone else is already in the process of closing
        # this uploader - let them do it!
        return

    filename = data["filename"]
    version = data["version"]

    # now get the FileInfo for this file
    from Acquire.Storage import FileInfo as _FileInfo
    fileinfo = _FileInfo.load(drive=self,
                              filename=filename,
                              version=version)

    # finalise the upload on the file itself and persist the change
    file_key = data["filekey"]
    file_bucket = self._get_file_bucket(file_key)
    fileinfo.close_uploader(file_bucket=file_bucket)
    fileinfo.save()
def upload_chunk(self, file_uid, chunk_index, secret, chunk, checksum):
    """Upload a chunk of the file with UID 'file_uid'. This is the
       chunk at index 'chunk_idx', which is set equal to 'chunk'
       (validated with 'checksum'). The passed secret is used to
       authenticate this upload. The secret should be the
       multi_md5 has of the shared secret with the concatenated
       drive_uid, file_uid and chunk_index
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    # FIX: normalise the index up front. It may arrive as a string
    # (the original converted it with int() only *after* using it in
    # a "%d" format, which raises TypeError for string input)
    chunk_index = int(chunk_index)

    bucket = _get_service_account_bucket()

    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)
    data = _ObjectStore.get_object_from_json(bucket, key)

    shared_secret = data["secret"]

    from Acquire.Crypto import Hash as _Hash

    # the expected secret is bound to this drive, file and chunk index
    shared_secret = _Hash.multi_md5(shared_secret,
                                    "%s%s%d" % (self._drive_uid,
                                                file_uid, chunk_index))

    if secret != shared_secret:
        raise PermissionError(
            "Invalid chunked upload secret. You do not have permission "
            "to upload chunks to this file!")

    # validate the data checksum
    check = _Hash.md5(chunk)

    if check != checksum:
        from Acquire.Storage import FileValidationError
        raise FileValidationError(
            "Invalid checksum for chunk: %s versus %s" %
            (check, checksum))

    # NOTE(review): 'compression' is recorded as bz2 but the chunk is
    # stored as passed - presumably the client compressed it; confirm
    meta = {"filesize": len(chunk),
            "checksum": checksum,
            "compression": "bz2"}

    file_key = data["filekey"]
    file_bucket = self._get_file_bucket(file_key)

    data_key = "%s/data/%d" % (file_key, chunk_index)
    meta_key = "%s/meta/%d" % (file_key, chunk_index)

    # (removed a redundant re-import of ObjectStore here)
    _ObjectStore.set_object_from_json(file_bucket, meta_key, meta)
    _ObjectStore.set_object(file_bucket, data_key, chunk)
def register(self, par, authorisation, secret=None):
    """Register the passed par, which is authorised using the passed
       authorisation. If the authorisation is correct this this will
       return the URL of the PAR
    """
    from Acquire.Client import PAR as _PAR
    from Acquire.Client import Authorisation as _Authorisation

    if not isinstance(par, _PAR):
        raise TypeError("The par must be type PAR")

    # create a new UID for this PAR
    from Acquire.ObjectStore import create_uid as _create_uid
    uid = _create_uid()
    par._set_uid(uid)

    if par.expired():
        raise PermissionError("The passed PAR has already expired!")

    if not isinstance(authorisation, _Authorisation):
        raise TypeError("The authorisation must be type Authorisation")

    identifiers = authorisation.verify(
        resource="create_par %s" % par.fingerprint(),
        return_identifiers=True)

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    # only store a derived hash of the secret, bound to this PAR's uid
    if secret is not None and len(secret) > 0:
        from Acquire.Crypto import Hash
        secret = Hash.multi_md5(uid, secret)
    else:
        secret = None

    import json as _json

    record = {"par": par.to_data(),
              "identifiers": _json.dumps(identifiers),
              "secret": secret}

    bucket = _get_service_account_bucket()
    _ObjectStore.set_object_from_json(bucket,
                                      "%s/%s" % (_par_root, uid),
                                      record)

    return uid
def save(self):
    """Save the metadata about this drive to the object store"""
    if self.is_null():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    # serialise and write under this drive's key
    _ObjectStore.set_object_from_json(_get_service_account_bucket(),
                                      self._drive_key(),
                                      self.to_data())
def run(args):
    """This function will allow anyone to query the current login
       status of the session with passed UID"""
    status = 0
    message = None
    session_status = None

    session_uid = args["session_uid"]
    username = args["username"]

    # generate a sanitised version of the username
    user_account = UserAccount(username)

    # now log into the central identity account to query
    # the current status of this login session
    bucket = login_to_service_account()

    user_session_key = "sessions/%s/%s" % \
        (user_account.sanitised_name(), session_uid)

    try:
        login_session = LoginSession.from_data(
            ObjectStore.get_object_from_json(bucket, user_session_key))
    except:
        login_session = None

    if login_session is None:
        # the session may have been moved to the expired pool.
        # FIX: guard this lookup too - previously a missing expired
        # session raised a raw object-store error and the
        # InvalidSessionError below could never be reached
        user_session_key = "expired_sessions/%s/%s" % \
            (user_account.sanitised_name(), session_uid)

        try:
            login_session = LoginSession.from_data(
                ObjectStore.get_object_from_json(bucket,
                                                 user_session_key))
        except:
            login_session = None

    if login_session is None:
        raise InvalidSessionError("Cannot find the session '%s'" %
                                  session_uid)

    status = 0
    message = "Success: Status = %s" % login_session.status()
    session_status = login_session.status()

    return_value = create_return_value(status, message)

    if session_status:
        return_value["session_status"] = session_status

    return return_value
def get_trusted_services():
    """Return a dictionary of all trusted services indexed by
       their type
    """
    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_this_service as _get_this_service
        from Acquire.Service import Service as _Service
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        # we already trust ourselves
        service = _get_this_service()

        trusted_services = {}
        trusted_services[service.service_type()] = [service]

        bucket = _get_service_account_bucket()

        uidkey = "_trusted/uid/"
        datas = _ObjectStore.get_all_objects(bucket, uidkey)

        for data in datas:
            remote_service = _Service.from_data(data)

            if remote_service.should_refresh_keys():
                # need to update the keys in our copy of the service
                remote_service.refresh_keys()
                key = "%s/%s" % (uidkey, remote_service.uid())
                _ObjectStore.set_object_from_json(
                    bucket, key, remote_service.to_data())

            # FIX: the original accumulated into (and returned) 'datas',
            # the raw list of object data, instead of the dictionary
            # of services indexed by type that this function promises
            service_type = remote_service.service_type()

            if service_type in trusted_services:
                trusted_services[service_type].append(remote_service)
            else:
                trusted_services[service_type] = [remote_service]

        return trusted_services
    else:
        # this is running on the client
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        return wallet.get_services()
def load_transaction(uid, bucket=None):
    """Load the transactionrecord with UID=uid from the ledger

    Args:
        uid (str): UID of transaction to load
        bucket (dict, default=None): Bucket to load data from
    Returns:
        TransactionRecord: Transaction with that UID
    """
    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    from Acquire.Accounting import TransactionRecord as _TransactionRecord
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    record_key = Ledger.get_key(uid)
    data = _ObjectStore.get_object_from_json(bucket, record_key)

    if data is None:
        from Acquire.Accounting import LedgerError
        raise LedgerError("There is no transaction recorded in the "
                          "ledger with UID=%s (at key %s)" %
                          (uid, record_key))

    return _TransactionRecord.from_data(data)
def get_pending_job_uids(self, passphrase=None):
    """Return the UIDs of all of the jobs that need to be submitted"""
    if self.is_null():
        return []

    if not Cluster._is_running_service():
        # client side - ask the compute service for the list
        passphrase = self.passphrase(resource="get_pending_job_uids")
        args = {"passphrase": passphrase}

        result = self.compute_service().call_function(
            function="get_pending_job_uids", args=args)

        return self.decrypt_data(result["job_uids"])

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    # service side - authenticate, then list the pending pool
    self.verify_passphrase(resource="get_pending_job_uids",
                           passphrase=passphrase)

    bucket = _get_service_account_bucket()

    return _ObjectStore.get_all_object_names(bucket=bucket,
                                             prefix="compute/pending/",
                                             without_prefix=True)
def get_local_bucket(empty=False):
    """ Creates and returns a local bucket that's created in the
        /tmp/hugs_test directory

        Args:
            empty (bool, default=False): If True return an empty bucket
        Returns:
            str: Path to local bucket
    """
    from pathlib import Path
    import shutil
    from Acquire.ObjectStore import use_testing_object_store_backend

    local_buckets_dir = Path("/tmp/hugs_test")

    # wipe any previous contents when an empty bucket is requested
    if local_buckets_dir.exists() and empty is True:
        shutil.rmtree(local_buckets_dir)

    # FIX: exist_ok=True - the original called mkdir(parents=True) even
    # when the directory already existed (empty=False), which raised
    # FileExistsError
    local_buckets_dir.mkdir(parents=True, exist_ok=True)

    root_bucket = use_testing_object_store_backend(local_buckets_dir)

    bucket = ObjectStore.create_bucket(bucket=root_bucket,
                                       bucket_name="hugs_test")

    return bucket
def get_service_user_account_uid(accounting_service_uid):
    """Return the UID of the financial Acquire.Accounting.Account
       that is held on the accounting service with UID
       'accounting_service_uid' for the service user on this service.
       This is the account to which payment for this service should
       be sent
    """
    assert_running_service()

    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    key = "%s/account/%s" % (_service_key, accounting_service_uid)

    try:
        account_uid = _ObjectStore.get_string_object(bucket, key)
    except:
        account_uid = None

    if account_uid is None:
        from Acquire.Service import ServiceAccountError
        raise ServiceAccountError(
            "This service does not have a valid financial account on "
            "the accounting service at '%s'" % accounting_service_uid)

    return account_uid