def trust_service(service):
    """Trust the passed service.

    This records the service as trusted, e.g. saving the keys and
    certificates for this service and allowing it to be used for
    the specified type.
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        # on the client, trust is delegated to the user's wallet
        from Acquire.Client import Wallet as _Wallet
        _Wallet().add_service(service)
        return

    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.ObjectStore import url_to_encoded as _url_to_encoded
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()

    uidkey = "_trusted/uid/%s" % service.uid()
    urlkey = "_trusted/url/%s" % _url_to_encoded(service.canonical_url())

    # store the trusted service by both canonical_url and uid - the
    # url entry is just a pointer to the uid entry
    _ObjectStore.set_object_from_json(bucket, uidkey, service.to_data())
    _ObjectStore.set_string_object(bucket, urlkey, uidkey)

    from Acquire.Service import clear_services_cache \
        as _clear_services_cache
    _clear_services_cache()
def save_transaction(record, bucket=None):
    """Save the passed transaction record to the object store

    Args:
        record (TransactionRecord): record to save
        bucket (dict, default=None): bucket in which to save the data

    Returns:
        None
    """
    from Acquire.Accounting import TransactionRecord as _TransactionRecord

    if not isinstance(record, _TransactionRecord):
        raise TypeError("You can only write TransactionRecord objects "
                        "to the ledger!")

    # null records hold nothing worth persisting
    if record.is_null():
        return

    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    _ObjectStore.set_object_from_json(bucket,
                                      Ledger.get_key(record.uid()),
                                      record.to_data())
def test_objstore(bucket):
    """Round-trip strings (including non-ASCII) and a JSON object
    through the object store, then check key listing is complete."""
    stored_keys = []

    msg = "ƒƒƒ Hello World ∂∂∂"
    ObjectStore.set_string_object(bucket, "test", msg)
    stored_keys.append("test")
    assert ObjectStore.get_string_object(bucket, "test") == msg

    msg = "€€#¢∞ Hello ˚ƒ´πµçµΩ"
    ObjectStore.set_string_object(bucket, "test/something", msg)
    stored_keys.append("test/something")
    assert ObjectStore.get_string_object(bucket, "test/something") == msg

    obj = {"cat": "mieow",
           "dog": "woof",
           "sounds": [1, 2, 3, 4, 5],
           "flag": True}
    ObjectStore.set_object_from_json(bucket, "test/object", obj)
    stored_keys.append("test/object")
    assert ObjectStore.get_object_from_json(bucket, "test/object") == obj

    # every key we wrote - and nothing else - should be listed
    names = ObjectStore.get_all_object_names(bucket)
    assert len(names) == len(stored_keys)

    for name in names:
        assert name in stored_keys
def _record_daily_balance(self, balance, liability, receivable,
                          datetime=None, bucket=None):
    """Record the starting balance for the day containing 'datetime'
       as 'balance', with the starting outstanding liabilities at
       'liability' and starting outstanding accounts receivable at
       'receivable'. If 'datetime' is None, then the balance for
       today is set.
    """
    if self.is_null():
        return

    if datetime is None:
        # default to recording the balance for today
        datetime = _datetime.datetime.now()

    # normalise all monetary values to Decimal before serialising
    balance = _create_decimal(balance)
    liability = _create_decimal(liability)
    receivable = _create_decimal(receivable)

    balance_key = self._get_balance_key(datetime)

    if bucket is None:
        bucket = _login_to_service_account()

    record = {"balance": str(balance),
              "liability": str(liability),
              "receivable": str(receivable)}

    _ObjectStore.set_object_from_json(bucket, balance_key, record)
def register(par, url_checksum, details_function, cleanup_function=None):
    """Register the passed PAR, passing in the checksum of the
       PAR's secret URL (so we can verify the close), and optionally
       supplying a cleanup_function that is called when the PAR
       is closed.

       The passed 'details_function' should be used to extract
       the object-store driver-specific details from the PAR and
       convert them into a dictionary. The signature should be;

       driver_details = details_function(par)
    """
    from Acquire.Service import is_running_service as _is_running_service

    # registration only happens on the service
    if not _is_running_service():
        return

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import OSPar as _OSPar
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Function as _Function
    from Acquire.ObjectStore import datetime_to_string \
        as _datetime_to_string

    if par is None:
        return

    if not isinstance(par, _OSPar):
        raise TypeError("You can only register pars of type PAR")

    if par.is_null():
        return

    record = {}
    record["par"] = par.to_data()

    # driver details are either extracted via the supplied function
    # or taken straight from the PAR
    if details_function is None:
        record["driver_details"] = par._driver_details
    else:
        record["driver_details"] = details_function(par)

    record["url_checksum"] = url_checksum

    if cleanup_function is not None:
        if not isinstance(cleanup_function, _Function):
            cleanup_function = _Function(cleanup_function)
        record["cleanup_function"] = cleanup_function.to_data()

    expire_string = _datetime_to_string(par.expires_when())
    bucket = _get_service_account_bucket()

    # index the full record by uid...
    uid_key = "%s/uid/%s/%s" % (_registry_key, par.uid(), expire_string)
    _ObjectStore.set_object_from_json(bucket, uid_key, record)

    # ...and a pointer by expiry time, so expired PARs can be found
    expire_key = "%s/expire/%s/%s" % (_registry_key, expire_string,
                                      par.uid())
    _ObjectStore.set_object_from_json(bucket, expire_key, par.uid())
def save_service_keys_to_objstore(include_old_keys=False):
    """Call this function to ensure that the current set of keys
       used for this service are saved to object store
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    service = get_this_service(need_private_access=True)
    oldkeys = service.dump_keys(include_old_keys=include_old_keys)

    bucket = _get_service_account_bucket()

    # write the dumped keys to storage, indexed by their datetime
    key = "%s/oldkeys/%s" % (_service_key, oldkeys["datetime"])
    _ObjectStore.set_object_from_json(bucket, key, oldkeys)

    # record a pointer from each key fingerprint back to this dump
    for fingerprint in oldkeys.keys():
        if fingerprint not in ("datetime", "encrypted_passphrase"):
            fkey = "%s/oldkeys/fingerprints/%s" % (_service_key,
                                                   fingerprint)
            _ObjectStore.set_string_object(bucket, fkey, key)
def set_trusted_service_info(service_url, service):
    """Set the trusted service info for 'service_url' to 'service'"""
    key = "services/%s" % url_to_encoded(service_url)
    _ObjectStore.set_object_from_json(_login_to_service_account(),
                                      key, service.to_data())
def submit_job(self, uid):
    """Submit the job with specified UID to this cluster.

       On the service this will put the UID of the job into the
       "pending" pool, and will signal the cluster to pull that job.

       On the client this will pull the job with that UID from the
       pending pool, moving it to the "submitting" pool and will
       pass this job to the cluster submission system
    """
    if not Cluster._is_running_service():
        # client side: fetch the pending job and change its status
        # to "submitting"
        return self.get_job(uid=uid, start_state="pending",
                            end_state="submitting")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import get_datetime_now_to_string \
        as _get_datetime_now_to_string

    # service side: record the job in the "pending" pool with the
    # time at which it became pending
    resource = {"pending": _get_datetime_now_to_string(),
                "uid": uid}

    _ObjectStore.set_object_from_json(_get_service_account_bucket(),
                                      "compute/pending/%s" % uid,
                                      resource)
def _generate_service_uid(bucket, registry_uid):
    """Function to generate a new service_uid on this registry.

       The UIDs have the form a0-a0, when "a" is any letter from
       [a-zA-Z] and "0" is any number from [0-9]. This gives 520
       possible values for each part either side of the hyphen.

       The part on the left of the hyphen is the root UID, which
       matches the root of the service_uid of the registry service
       that registered this service (the service_uid of a registry
       service has the UID root-root).

       If more than 520 values are needed, then either side of the
       ID can be extended by additional pairs of a0 digits, using a
       "." to separate pairs, e.g. the service_uid for registry b4-b4
       that comes after b4-Z9.Z9.Z9 is b4-a0.a0.a0.a0. Similarly, the
       registry after Z9 is A0-A0. This means that a0.a0-a0.a0.a0.a0
       would be a perfectly valid ID. We would only need IDs of this
       length if we have ~270k registry services, and this service_uid
       came from a service that had registered ~73 trillion services...

       The registry root Z9, with registry Z9-Z9 is reserved for the
       temporary registry created during testing
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex

    root = registry_uid.split("-")[0]

    key = "%s/last_service_uid" % _registry_key

    # serialise access to the counter across concurrent invocations
    mutex = _Mutex(key=key)

    try:
        try:
            last_vals = _ObjectStore.get_object_from_json(bucket=bucket,
                                                          key=key)
            last_vals = _inc_uid(last_vals)
        except:
            # no counter yet (first service registered) - start fresh
            last_vals = [0, 0]

        service_uid = "%s-%s" % (root, _to_uid(last_vals))

        # never hand out the registry's own UID
        while service_uid == registry_uid:
            last_vals = _inc_uid(last_vals)
            service_uid = "%s-%s" % (root, _to_uid(last_vals))

        _ObjectStore.set_object_from_json(bucket=bucket, key=key,
                                          data=last_vals)
    finally:
        # BUG FIX: always release the mutex, even if the object store
        # write raises - otherwise the counter key stays locked and no
        # further service UIDs can ever be generated
        mutex.unlock()

    return service_uid
def save_transaction(record, bucket=None):
    """Save the passed transactionrecord to the object store"""
    if not isinstance(record, _TransactionRecord):
        raise TypeError("You can only write TransactionRecord objects "
                        "to the ledger!")

    # nothing to persist for a null record
    if record.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    _ObjectStore.set_object_from_json(bucket,
                                      Ledger.get_key(record.uid()),
                                      record.to_data())
def save(self): """Save this ComputeJob to the objectstore""" if self.is_null(): return from Acquire.ObjectStore import ObjectStore as _ObjectStore from Acquire.Service import get_service_account_bucket \ as _get_service_account_bucket bucket = _get_service_account_bucket() key = "compute/job/%s" % self._uid _ObjectStore.set_object_from_json(bucket, key, self.to_data())
def upload_chunk(self, file_uid, chunk_index, secret, chunk, checksum):
    """Upload a chunk of the file with UID 'file_uid'. This is the
       chunk at index 'chunk_index', which is set equal to 'chunk'
       (validated with 'checksum'). The passed secret is used to
       authenticate this upload. The secret should be the multi_md5
       hash of the shared secret with the concatenated drive_uid,
       file_uid and chunk_index
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    # look up the uploader record holding the shared secret and the
    # destination file key
    key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)
    data = _ObjectStore.get_object_from_json(bucket, key)

    shared_secret = data["secret"]

    from Acquire.Crypto import Hash as _Hash
    shared_secret = _Hash.multi_md5(
        shared_secret,
        "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

    # authenticate the upload against the per-chunk derived secret
    if secret != shared_secret:
        raise PermissionError(
            "Invalid chunked upload secret. You do not have permission "
            "to upload chunks to this file!")

    # validate the data checksum
    check = _Hash.md5(chunk)

    if check != checksum:
        from Acquire.Storage import FileValidationError
        raise FileValidationError(
            "Invalid checksum for chunk: %s versus %s" %
            (check, checksum))

    meta = {"filesize": len(chunk),
            "checksum": checksum,
            "compression": "bz2"}

    file_key = data["filekey"]
    chunk_index = int(chunk_index)

    file_bucket = self._get_file_bucket(file_key)
    data_key = "%s/data/%d" % (file_key, chunk_index)
    meta_key = "%s/meta/%d" % (file_key, chunk_index)

    # write the per-chunk metadata and the chunk data itself
    # (fix: removed a redundant re-import of ObjectStore here - it is
    # already imported at the top of this function)
    _ObjectStore.set_object_from_json(file_bucket, meta_key, meta)
    _ObjectStore.set_object(file_bucket, data_key, chunk)
def register(self, par, authorisation, secret=None):
    """Register the passed par, which is authorised using the passed
       authorisation. If the authorisation is correct this will
       return the UID assigned to the registered PAR.

       Note: the original docstring said this returns "the URL of
       the PAR", but the code below returns the newly created UID.
    """
    from Acquire.Client import PAR as _PAR
    from Acquire.Client import Authorisation as _Authorisation

    if not isinstance(par, _PAR):
        raise TypeError("The par must be type PAR")

    # create a new UID for this PAR
    from Acquire.ObjectStore import create_uid as _create_uid

    uid = _create_uid()
    par._set_uid(uid)

    if par.expired():
        raise PermissionError("The passed PAR has already expired!")

    if not isinstance(authorisation, _Authorisation):
        raise TypeError("The authorisation must be type Authorisation")

    # verify the authorisation against this specific PAR (fingerprint
    # binds the authorisation to its contents) and capture the
    # identifiers of the authorising user
    identifiers = authorisation.verify(
                            resource="create_par %s" % par.fingerprint(),
                            return_identifiers=True)

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    if secret is not None and len(secret) > 0:
        # store only a salted hash of the secret, never the secret itself
        from Acquire.Crypto import Hash
        secret = Hash.multi_md5(uid, secret)
    else:
        secret = None

    import json as _json

    data = {"par": par.to_data(),
            "identifiers": _json.dumps(identifiers),
            "secret": secret}

    key = "%s/%s" % (_par_root, uid)

    bucket = _get_service_account_bucket()
    _ObjectStore.set_object_from_json(bucket, key, data)

    return uid
def _refresh_this_service_keys_and_certs(service_info, service_password):
    """Refresh the keys and certificates of the service described by
       'service_info' (decrypted using 'service_password'). The old
       keys are archived to the object store before new keys are
       generated. Returns the service data.
    """
    from Acquire.Service import Service as _Service
    service = _Service.from_data(service_info, service_password)

    # a service still in first-stage setup has no keys to rotate
    if service._uid == "STAGE1":
        return service_info

    if not service.should_refresh_keys():
        return service_info

    oldkeys = service.dump_keys(include_old_keys=False)

    # now write the old keys to storage
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/oldkeys/%s" % (_service_key, oldkeys["datetime"])
    _ObjectStore.set_object_from_json(bucket, key, oldkeys)

    # now write the pointers from fingerprint to file...
    for fingerprint in oldkeys.keys():
        if fingerprint not in ["datetime", "encrypted_passphrase"]:
            _ObjectStore.set_string_object(
                bucket,
                "%s/oldkeys/fingerprints/%s" % (_service_key, fingerprint),
                key)

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    m = _Mutex(key=service.uid(), bucket=bucket)

    try:
        service_data = _ObjectStore.get_object_from_json(bucket,
                                                         _service_key)
        service_info = _Service.from_data(service_data)

        if service_info.last_key_update() == last_update:
            # no-one else has beaten us - write the updated keys to
            # global state
            _ObjectStore.set_object_from_json(
                    bucket, _service_key,
                    service.to_data(service_password))
    finally:
        # BUG FIX: always release the mutex, even if the object store
        # read or write fails - otherwise the service record stays
        # locked and all future refreshes deadlock
        m.unlock()

    # NOTE(review): this returns the data read from the store (possibly
    # written by a competing refresher), not this process's refreshed
    # data - preserved as-is since callers may rely on it; confirm
    return service_data
def save(self): """Save the metadata about this drive to the object store""" if self.is_null(): return from Acquire.Service import get_service_account_bucket \ as _get_service_account_bucket from Acquire.ObjectStore import ObjectStore as _ObjectStore bucket = _get_service_account_bucket() drive_key = self._drive_key() data = self.to_data() _ObjectStore.set_object_from_json(bucket, drive_key, data)
def get_trusted_services():
    """Return a dictionary of all trusted services indexed by
       their type
    """
    from Acquire.Service import is_running_service as _is_running_service

    if not _is_running_service():
        # this is running on the client - trust is managed by the wallet
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        return wallet.get_services()

    from Acquire.Service import get_this_service as _get_this_service
    from Acquire.Service import Service as _Service
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import url_to_encoded as \
        _url_to_encoded

    # we already trust ourselves
    service = _get_this_service()

    trusted_services = {}
    trusted_services[service.service_type()] = [service]

    bucket = _get_service_account_bucket()

    uidkey = "_trusted/uid/"
    datas = _ObjectStore.get_all_objects(bucket, uidkey)

    for data in datas:
        remote_service = _Service.from_data(data)

        if remote_service.should_refresh_keys():
            # need to update the keys in our copy of the service
            remote_service.refresh_keys()
            # BUG FIX: uidkey already ends in "/", so "%s/%s" wrote
            # back to "_trusted/uid//<uid>" instead of the
            # "_trusted/uid/<uid>" key used everywhere else
            key = "%s%s" % (uidkey, remote_service.uid())
            _ObjectStore.set_object_from_json(bucket, key,
                                              remote_service.to_data())

        # BUG FIX: the original appended services into 'datas' (the raw
        # list read from the object store) and returned 'datas' - it
        # must accumulate into and return 'trusted_services'
        if remote_service.service_type() in trusted_services:
            trusted_services[remote_service.service_type()].append(
                                                        remote_service)
        else:
            trusted_services[remote_service.service_type()] = \
                                                        [remote_service]

    return trusted_services
def save(self): """Save the current state of this LoginSession to the object store """ if self.is_null(): return from Acquire.ObjectStore import ObjectStore as _ObjectStore from Acquire.Service import get_service_account_bucket \ as _get_service_account_bucket bucket = _get_service_account_bucket() key = self._get_key() _ObjectStore.set_object_from_json(bucket=bucket, key=key, data=self.to_data())
def _credit(self, debit_note, bucket=None):
    """Credit the value in 'debit_note' to this account. If the
       debit_note shows that the payment is provisional then this
       will be recorded as accounts receivable. This will record
       the credit with the same UID as the debit identified in
       the debit_note, so that we can reconcile all credits
       against matching debits.
    """
    if not isinstance(debit_note, _DebitNote):
        raise TypeError("The passed debit note must be a DebitNote")

    # nothing to do for a zero or negative value
    if debit_note.value() <= 0:
        return

    if bucket is None:
        bucket = _login_to_service_account()

    # provisional payments are held as accounts receivable
    if debit_note.is_provisional():
        encoded_value = _TransactionInfo.encode(
                            _TransactionCode.ACCOUNT_RECEIVABLE,
                            debit_note.value())
    else:
        encoded_value = _TransactionInfo.encode(
                            _TransactionCode.CREDIT,
                            debit_note.value())

    # create a UID and timestamp for this credit and record
    # it in the account
    now = self._get_safe_now()

    # we need to record the exact timestamp of this credit...
    timestamp = now.timestamp()

    # ...and to create a key to find this credit later. The key is
    # made up from the date and timestamp of the credit and a short
    # random string
    day_key = "%4d-%02d-%02d/%s" % (now.year, now.month,
                                    now.day, timestamp)
    uid = "%s/%s" % (day_key, str(_uuid.uuid4())[0:8])
    item_key = "%s/%s/%s" % (self._key(), uid, encoded_value)

    # the line item records the UID of the debit note, so we can
    # find this debit note in the system and, from this, get the
    # original transaction in the transaction record
    line_item = _LineItem(debit_note.uid(), debit_note.authorisation())

    _ObjectStore.set_object_from_json(bucket, item_key,
                                      line_item.to_data())

    return (uid, timestamp)
def refresh_service_keys_and_certs(service, force_refresh=False):
    """This function will check if any key rotation is needed, and if
       so, it will automatically refresh the keys and certificates.
       The old keys and certificates will be stored in a database of
       old keys and certificates
    """
    assert_running_service()

    # a service still in first-stage setup has no keys to rotate
    if service._uid == "STAGE1":
        return service

    if (not force_refresh) and (not service.should_refresh_keys()):
        return service

    # ensure that the current keys are saved to the object store
    save_service_keys_to_objstore()

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.Service import Service as _Service
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    m = _Mutex(key=service.uid(), bucket=bucket)

    try:
        service_data = _ObjectStore.get_object_from_json(bucket,
                                                         _service_key)
        service_info = _Service.from_data(service_data)

        if service_info.last_key_update() == last_update:
            # no-one else has beaten us - write the updated keys to
            # global state
            _ObjectStore.set_object_from_json(
                bucket, _service_key,
                service.to_data(_get_service_password()))
    finally:
        # BUG FIX: always release the mutex, even if reading or writing
        # the service record fails - otherwise the service stays locked
        m.unlock()

    # clear the cache as we will need to load a new object
    clear_serviceinfo_cache()

    return get_this_service(need_private_access=True)
def _credit_receipt(self, debit_note, receipt, bucket=None):
    """Credit the value of the passed 'receipt' to this account.
       The receipt must be for a previous provisional credit, hence
       the money is awaiting transfer from accounts receivable.
    """
    if not isinstance(receipt, _Receipt):
        raise TypeError("The passed receipt must be a Receipt")

    if not isinstance(debit_note, _DebitNote):
        raise TypeError("The passed debit note must be a DebitNote")

    # null receipts carry no value
    if receipt.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    # the receipt must exactly match the debit it is receipting
    if receipt.receipted_value() != debit_note.value():
        raise ValueError("The receipted value does not match the value "
                         "of the debit note: %s versus %s" %
                         (receipt.receipted_value(),
                          debit_note.value()))

    encoded_value = _TransactionInfo.encode(
                        _TransactionCode.SENT_RECEIPT,
                        receipt.value(),
                        receipt.receipted_value())

    # create a UID and timestamp for this credit and record
    # it in the account - we need the exact timestamp...
    now = self._get_safe_now()
    timestamp = now.timestamp()

    # ...and a key made from the date, timestamp and a short random
    # string so that this credit can be found later
    day_key = "%4d-%02d-%02d/%s" % (now.year, now.month,
                                    now.day, timestamp)
    uid = "%s/%s" % (day_key, str(_uuid.uuid4())[0:8])
    item_key = "%s/%s/%s" % (self._key(), uid, encoded_value)

    line_item = _LineItem(debit_note.uid(), receipt.authorisation())

    _ObjectStore.set_object_from_json(bucket, item_key,
                                      line_item.to_data())

    return (uid, timestamp)
def set_cluster(cluster, authorisation=None, passphrase=None, user=None):
    """Function used to set the single compute cluster that is
       connected to this compute service. This must be authorised
       by an admin user of this compute service
    """
    if not isinstance(cluster, Cluster):
        raise TypeError("The cluster must be type Cluster")

    resource = "set_cluster %s" % cluster.fingerprint()

    from Acquire.Client import Authorisation as _Authorisation

    if not Cluster._is_running_service():
        # client side: build an authorisation for this request and
        # ask the compute service to set the cluster
        authorisation = _Authorisation(user=user, resource=resource)
        args = {"authorisation": authorisation.to_data(),
                "cluster": cluster.to_data()}
        cluster.compute_service().call_function(function="set_cluster",
                                                args=args)
        return

    # service side: validate the request before saving
    from Acquire.Service import get_this_service as _get_this_service
    service = _get_this_service(need_private_access=True)

    if authorisation is not None:
        if not isinstance(authorisation, _Authorisation):
            raise TypeError(
                "The authorisation must be type Authorisation")
        service.assert_admin_authorised(authorisation, resource)
    else:
        # we are rotating keys, so check the passphrase against
        # the old passphrase
        cluster = Cluster.get_cluster()
        cluster.verify_passphrase(passphrase=passphrase,
                                  resource="set_cluster")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    _ObjectStore.set_object_from_json(_get_service_account_bucket(),
                                      "compute/cluster",
                                      cluster.to_data())
def set_object_from_json(bucket, key, data):
    """Wraps the Acquire set_object_from_json function

    Args:
        bucket (str): Bucket for data storage
        key (str): Key for data in bucket
        data: Data to store

    Returns:
        None
    """
    return ObjectStore.set_object_from_json(bucket=bucket,
                                            key=key,
                                            data=data)
def save(self): """Save this WorkSheet to the object store Returns: None """ from Acquire.Service import assert_running_service \ as _assert_running_service _assert_running_service() if self.is_null(): return from Acquire.Service import get_service_account_bucket \ as _get_service_account_bucket bucket = _get_service_account_bucket() from Acquire.ObjectStore import ObjectStore as _ObjectStore key = "worksheet/%s" % self.uid() _ObjectStore.set_object_from_json(bucket, key, self.to_data())
def _get_aclrules(self, user_guid, aclrules, bucket=None):
    """Load up the ACLRules for this group. If none are set, then
       either the passed ACLRules will be used, or the specified
       user will be set as the owner
    """
    from Acquire.Identity import ACLRules as _ACLRules
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    aclkey = self._acls_key()

    # best-effort load of previously-saved rules - a missing object
    # simply means no rules have been set yet
    try:
        stored = _ObjectStore.get_object_from_json(bucket=bucket,
                                                   key=aclkey)
        self._aclrules = _ACLRules.from_data(stored)
    except:
        self._aclrules = None

    if self._aclrules is not None:
        # rules already exist - nothing more to do
        return

    if aclrules is None:
        if user_guid is None:
            raise PermissionError(
                "You must specify the guid of the initial user who "
                "owns this account!")
        # default: the initial user owns this group
        aclrules = _ACLRules.owner(user_guid=user_guid)
    elif not isinstance(aclrules, _ACLRules):
        raise TypeError("The ACLRules must be type ACLRules")

    _ObjectStore.set_object_from_json(bucket=bucket, key=aclkey,
                                      data=aclrules.to_data())
    self._aclrules = aclrules
def create(user_uid, password, primary_password, device_uid=None):
    """Create the credentials for the user with specified user_uid,
       optionally logging in via the specified device_uid, using the
       specified password, to protect the passed "primary_password"

       This returns the OTP that has been created to be associated
       with these credentials
    """
    from Acquire.Crypto import PrivateKey as _PrivateKey
    from Acquire.Crypto import OTP as _OTP
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import bytes_to_string as _bytes_to_string

    if device_uid is None:
        # not tied to a specific device - key the credentials on the
        # user's own uid
        device_uid = user_uid

    privkey = _PrivateKey(name="user_creds_key %s" % user_uid)
    otp = _OTP()

    # protect the otp secret and the primary password with the user's
    # new private key; the private key itself is protected by the
    # user's login password
    otpsecret = otp.encrypt(privkey.public_key())
    encrypted_password = privkey.encrypt(primary_password)

    data = {"primary_password": _bytes_to_string(encrypted_password),
            "private_key": privkey.to_data(passphrase=password),
            "otpsecret": _bytes_to_string(otpsecret)
            }

    key = "%s/credentials/%s/%s" % (_user_root, user_uid, device_uid)

    _ObjectStore.set_object_from_json(
        bucket=_get_service_account_bucket(), key=key, data=data)

    return otp
def _set_status(self, status):
    """Internal function to set the status of the session.
       This ensures that the data for the session is saved
       into the correct part of the object store
    """
    if self.is_null():
        raise PermissionError(
            "Cannot set the status of a null LoginSession")

    if status not in ["approved", "pending", "denied",
                      "suspicious", "logged_out"]:
        raise ValueError("Cannot set an invalid status '%s'" % status)

    # no work needed if the status is unchanged
    if status == self._status:
        return

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    bucket = _get_service_account_bucket()

    # best-effort removal of the record under the old status key
    old_key = self._get_key()
    try:
        _ObjectStore.delete_object(bucket=bucket, key=old_key)
    except:
        pass

    # update the status, then save under the new status key
    self._status = status
    _ObjectStore.set_object_from_json(bucket=bucket,
                                      key=self._get_key(),
                                      data=self.to_data())

    # also keep a quick status lookup indexed by session uid
    status_key = "%s/status/%s" % (_sessions_key, self._uid)
    _ObjectStore.set_string_object(bucket=bucket, key=status_key,
                                   string_data=status)
def _debit_refund(self, refund, bucket=None):
    """Debit the value of the passed 'refund' from this account.
       The refund must be for a previous completed credit. There
       is a risk that this value has been spent, so this is one of
       the only functions that allows a balance to drop below an
       overdraft or other limit (as the refund should always
       succeed).
    """
    if not isinstance(refund, _Refund):
        raise TypeError("The passed refund must be a Refund")

    if refund.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    encoded_value = _TransactionInfo.encode(
                        _TransactionCode.SENT_REFUND, refund.value())

    # create a UID and timestamp for this debit and record
    # it in the account - we need the exact timestamp...
    now = self._get_safe_now()
    timestamp = now.timestamp()

    # ...and a key made from the date, timestamp and a short random
    # string so that this debit can be found later
    day_key = "%4d-%02d-%02d/%s" % (now.year, now.month,
                                    now.day, timestamp)
    uid = "%s/%s" % (day_key, str(_uuid.uuid4())[0:8])
    item_key = "%s/%s/%s" % (self._key(), uid, encoded_value)

    line_item = _LineItem(uid, refund.authorisation())

    _ObjectStore.set_object_from_json(bucket, item_key,
                                      line_item.to_data())

    return (uid, timestamp)
def _debit_receipt(self, receipt, bucket=None):
    """Debit the value of the passed 'receipt' from this account.
       The receipt must be for a previous provisional debit, hence
       the money should be available.
    """
    if not isinstance(receipt, _Receipt):
        raise TypeError("The passed receipt must be a Receipt")

    if receipt.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    encoded_value = _TransactionInfo.encode(
                        _TransactionCode.RECEIVED_RECEIPT,
                        receipt.value(),
                        receipt.receipted_value())

    # create a UID and timestamp for this debit and record
    # it in the account - we need the exact timestamp...
    now = self._get_safe_now()
    timestamp = now.timestamp()

    # ...and a key made from the date, timestamp and a short random
    # string so that this debit can be found later
    day_key = "%4d-%02d-%02d/%s" % (now.year, now.month,
                                    now.day, timestamp)
    uid = "%s/%s" % (day_key, str(_uuid.uuid4())[0:8])
    item_key = "%s/%s/%s" % (self._key(), uid, encoded_value)

    line_item = _LineItem(uid, receipt.authorisation())

    _ObjectStore.set_object_from_json(bucket, item_key,
                                      line_item.to_data())

    return (uid, timestamp)
def get_trusted_service(service_url=None, service_uid=None,
                        service_type=None, autofetch=True):
    """Return the trusted service info for the service with specified
       service_url or service_uid. If the service is not already
       trusted and 'autofetch' is True, it is fetched from a trusted
       registry and recorded as trusted; otherwise a
       ServiceAccountError is raised.
    """
    if service_url is not None:
        # normalise the url so lookups are consistent
        from Acquire.Service import Service as _Service
        service_url = _Service.get_canonical_url(service_url,
                                                 service_type=service_type)

    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_this_service as _get_this_service
        from Acquire.Service import Service as _Service
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        service = _get_this_service()

        if service_url is not None and \
                service.canonical_url() == service_url:
            # we trust ourselves :-)
            return service

        if service_uid is not None and service.uid() == service_uid:
            # we trust ourselves :-)
            return service

        bucket = _get_service_account_bucket()
        uidkey = None
        data = None

        if service_uid is not None:
            # direct lookup by uid
            uidkey = "_trusted/uid/%s" % service_uid
            try:
                data = _ObjectStore.get_object_from_json(bucket, uidkey)
            except:
                # missing object means the service is not yet trusted
                pass
        elif service_url is not None:
            # the url entry is a pointer to the uid entry
            urlkey = "_trusted/url/%s" % _url_to_encoded(service_url)
            try:
                uidkey = _ObjectStore.get_string_object(bucket, urlkey)
                if uidkey is not None:
                    data = _ObjectStore.get_object_from_json(bucket,
                                                             uidkey)
            except:
                pass

        if data is not None:
            remote_service = _Service.from_data(data)

            if remote_service.should_refresh_keys():
                # need to update the keys in our copy of the service
                remote_service.refresh_keys()

                if uidkey is not None:
                    # write the refreshed copy back to the store
                    _ObjectStore.set_object_from_json(
                                        bucket, uidkey,
                                        remote_service.to_data())

            return remote_service

        if not autofetch:
            from Acquire.Service import ServiceAccountError
            if service_uid is not None:
                raise ServiceAccountError(
                    "We do not trust the service with UID '%s'"
                    % service_uid)
            else:
                raise ServiceAccountError(
                    "We do not trust the service at URL '%s'"
                    % service_url)

        # we can try to fetch this data - we will ask our own
        # registry
        from Acquire.Registry import get_trusted_registry_service \
            as _get_trusted_registry_service

        registry = _get_trusted_registry_service(
                                        service_uid=service.uid())

        service = registry.get_service(service_uid=service_uid,
                                       service_url=service_url)

        # record the fetched service as trusted for future lookups
        from Acquire.Service import trust_service as _trust_service
        _trust_service(service)
        return service
    else:
        # this is running on the client - delegate to the wallet
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()

        service = wallet.get_service(service_uid=service_uid,
                                     service_url=service_url,
                                     service_type=service_type,
                                     autofetch=autofetch)
        return service
def run(args): """This function will allow the current user to authorise a logout from the current session - this will be authorised by signing the request to logout""" status = 0 message = None session_uid = args["session_uid"] username = args["username"] permission = args["permission"] signature = string_to_bytes(args["signature"]) # generate a sanitised version of the username user_account = UserAccount(username) # now log into the central identity account to query # the current status of this login session bucket = login_to_service_account() user_session_key = "sessions/%s/%s" % \ (user_account.sanitised_name(), session_uid) request_session_key = "requests/%s/%s" % (session_uid[:8], session_uid) login_session = LoginSession.from_data( ObjectStore.get_object_from_json(bucket, user_session_key)) if login_session: # get the signing certificate from the login session and # validate that the permission object has been signed by # the user requesting the logout cert = login_session.public_certificate() cert.verify(signature, permission) # the signature was correct, so log the user out. For record # keeping purposes we change the loginsession to a logout state # and move it to another part of the object store if login_session.is_approved(): login_session.logout() # only save sessions that were successfully approved if login_session: if login_session.is_logged_out(): expired_session_key = "expired_sessions/%s/%s" % \ (user_account.sanitised_name(), session_uid) ObjectStore.set_object_from_json(bucket, expired_session_key, login_session.to_data()) try: ObjectStore.delete_object(bucket, user_session_key) except: pass try: ObjectStore.delete_object(bucket, request_session_key) except: pass status = 0 message = "Successfully logged out" return_value = create_return_value(status, message) return return_value