def list_drives(self, drive_uid=None):
    """Return a list of all of the top-level drives to which this user
    has access, or all of the sub-drives of the drive with passed
    'drive_uid'
    """
    if self.is_null():
        return []

    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string as _encoded_to_string
    from Acquire.Storage import DriveMeta as _DriveMeta

    bucket = _get_service_account_bucket()

    # with no drive_uid we list the user's top-level drives; otherwise
    # we list the sub-drives contained in the specified drive
    if drive_uid is None:
        prefix = "%s/%s" % (_drives_root, self._user_guid)
    else:
        prefix = "%s/%s/%s" % (_subdrives_root, self._user_guid, drive_uid)

    names = _ObjectStore.get_all_object_names(bucket, prefix)

    # the last key component is the encoded drive name
    return [_DriveMeta(name=_encoded_to_string(name.split("/")[-1]),
                       container=drive_uid)
            for name in names]
def get_pending_job_uids(self, passphrase=None):
    """Return the UIDs of all of the jobs that need to be submitted"""
    if self.is_null():
        return []

    if not Cluster._is_running_service():
        # client side: ask the compute service and decrypt its reply
        passphrase = self.passphrase(resource="get_pending_job_uids")
        args = {"passphrase": passphrase}
        result = self.compute_service().call_function(
            function="get_pending_job_uids", args=args)
        return self.decrypt_data(result["job_uids"])

    # service side: read the pending-job keys straight from the store
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket

    self.verify_passphrase(resource="get_pending_job_uids",
                           passphrase=passphrase)

    bucket = _get_service_account_bucket()
    return _ObjectStore.get_all_object_names(bucket=bucket,
                                             prefix="compute/pending/",
                                             without_prefix=True)
def test_objstore(bucket):
    """Round-trip strings and JSON through the object store, then check
    that listing the bucket returns exactly the keys that were written.
    """
    expected = []

    msg = "ƒƒƒ Hello World ∂∂∂"
    ObjectStore.set_string_object(bucket, "test", msg)
    expected.append("test")
    assert ObjectStore.get_string_object(bucket, "test") == msg

    msg = "€€#¢∞ Hello ˚ƒ´πµçµΩ"
    ObjectStore.set_string_object(bucket, "test/something", msg)
    expected.append("test/something")
    assert ObjectStore.get_string_object(bucket, "test/something") == msg

    payload = {
        "cat": "mieow",
        "dog": "woof",
        "sounds": [1, 2, 3, 4, 5],
        "flag": True,
    }
    ObjectStore.set_object_from_json(bucket, "test/object", payload)
    expected.append("test/object")
    assert ObjectStore.get_object_from_json(bucket, "test/object") == payload

    names = ObjectStore.get_all_object_names(bucket)
    assert len(names) == len(expected)
    for name in names:
        assert name in expected
def get_object_names(bucket, prefix=None):
    """ List all the keys in the object store

        TODO - temp function, remove

        Args:
            bucket (dict): Bucket containing data
            prefix (str, default=None): Only return keys that start
                with this prefix
        Returns:
            list: List of keys in object store
    """
    # BUG FIX: the keyword argument was misspelled "bukcet", which made
    # every call raise TypeError instead of listing the keys
    return ObjectStore.get_all_object_names(bucket=bucket, prefix=prefix)
def exists(bucket, key):
    """ Checks if there is an object in the object store with the given key

        Args:
            bucket (dict): Bucket containing data
            key (str): Prefix for key in object store
        Returns:
            bool: True if exists in store
    """
    # any key starting with this prefix counts as a match
    matches = ObjectStore.get_all_object_names(bucket, prefix=key)
    return bool(matches)
def list_accounts(self, bucket=None):
    """Return the names of all of the accounts in this group"""
    if bucket is None:
        bucket = _login_to_service_account()

    # every key under this group's root is an encoded account name
    keys = _ObjectStore.get_all_object_names(bucket, self._root())
    return [_encoded_to_string(key) for key in keys]
def login(credentials, user_uid=None, remember_device=False):
    """Login to the session with specified 'short_uid' with the user
    with passed 'username' and 'credentials', optionally specifying
    the user_uid

    Args:
        credentials (Credentials): the username+password credentials
        user_uid (str, default=None): if given, log in as exactly this
            user; otherwise search for all users matching the credentials
        remember_device (bool, default=False): whether to remember the
            login device
    Returns:
        the result of UserCredentials.login for the matched user(s)
    Raises:
        TypeError: if 'credentials' is not a Credentials object
        UserValidationError: if no matching user is found
    """
    if user_uid is None:
        # find all of the user_uids of accounts with this
        # username+password combination
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        from Acquire.Client import Credentials as _Credentials
        from Acquire.Identity import UserCredentials as _UserCredentials
        from Acquire.Service import get_this_service as _get_this_service

        if not isinstance(credentials, _Credentials):
            raise TypeError("The credentials must be type Credentials")

        bucket = _get_service_account_bucket()

        encoded_password = _UserCredentials.hash(
            username=credentials.username(),
            password=credentials.password())

        prefix = "%s/passwords/%s/" % (_user_root, encoded_password)

        try:
            names = _ObjectStore.get_all_object_names(bucket=bucket,
                                                      prefix=prefix)
        except Exception:
            # FIX: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit. No matching keys means
            # no matching users.
            names = []

        # the user_uid is the final component of each key
        user_uids = [name.split("/")[-1] for name in names]
    else:
        user_uids = [user_uid]

    if len(user_uids) == 0:
        from Acquire.Identity import UserValidationError
        raise UserValidationError("No user with name '%s'"
                                  % credentials.username())

    from Acquire.Identity import UserCredentials as _UserCredentials
    return _UserCredentials.login(credentials=credentials,
                                  user_uids=user_uids,
                                  remember_device=remember_device)
def get_object_from_json(bucket, key):
    """ Removes the daterange from the passed key and uses the reduced
        key to get an object from the object store.

        Wraps the Acquire get_object_from_json function

        Args:
            bucket (dict): Bucket containing data
            key (str): Key for data in bucket
        Returns:
            Object: Object from store
        Raises:
            ValueError: if zero or more than one object matches the key
    """
    # Get the object and use the key as a prefix
    name = ObjectStore.get_all_object_names(bucket, prefix=key)

    # FIX: previously an empty result fell through to name[0] and raised
    # an opaque IndexError - fail with a clear message instead
    if len(name) == 0:
        raise ValueError("No object found with the key '%s'" % key)

    if len(name) > 1:
        raise ValueError("There should only be one object with this key")

    return ObjectStore.get_object_from_json(bucket, name[0])
def _get_transaction_keys_between(self, start_time, end_time, bucket=None):
    """Return all of the object store keys for transactions in this
    account beteen 'start_time' and 'end_time' (inclusive, e.g.
    start_time <= transaction <= end_time). This will return an empty
    list if there were no transactions in this time

    Args:
        start_time (datetime.datetime): start of the (inclusive) window
        end_time (datetime.datetime): end of the (inclusive) window
        bucket (default=None): bucket to search; the service account
            bucket is used if this is None
    Returns:
        list: object store keys of the matching transactions
    Raises:
        TypeError: if start_time or end_time is not a datetime
    """
    if bucket is None:
        bucket = _login_to_service_account()

    if not isinstance(start_time, _datetime.datetime):
        raise TypeError("The start time must be a datetime object, "
                        "not a %s" % start_time.__class__)

    if not isinstance(end_time, _datetime.datetime):
        raise TypeError("The end time must be a datetime object, "
                        "not a %s" % end_time.__class__)

    start_day = start_time.toordinal()
    end_day = end_time.toordinal()

    start_timestamp = start_time.timestamp()
    end_timestamp = end_time.timestamp()

    keys = []

    # transactions are keyed per-day, so scan each day in the window
    for day in range(start_day, end_day + 1):
        day_date = _datetime.datetime.fromordinal(day)

        prefix = "%s/%4d-%02d-%02d" % (self._key(), day_date.year,
                                       day_date.month, day_date.day)

        day_keys = _ObjectStore.get_all_object_names(bucket, prefix)

        for day_key in day_keys:
            # the first component of the key should be the timestamp
            try:
                timestamp = float(day_key.split("/")[0])
            except ValueError:
                # FIX: was a bare 'except:' - only a non-numeric
                # component should be skipped; timestamp 0 falls
                # outside any sensible window
                timestamp = 0

            if start_timestamp <= timestamp <= end_timestamp:
                keys.append("%s/%s" % (prefix, day_key))

    return keys
def list_accounts(self, bucket=None):
    """Return the names of all of the accounts in this group

    Args:
        bucket (dict, default=None): Bucket from which to load data
    Returns:
        :obj:`list`: List of names of the accounts in this group
    """
    self._assert_is_readable()

    if bucket is None:
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket
        bucket = _get_service_account_bucket()

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string \
        as _encoded_to_string

    root = self._root()
    keys = _ObjectStore.get_all_object_names(bucket, root)
    root_len = len(root)

    accounts = []

    for key in keys:
        try:
            # strip the root prefix and any leading separators to
            # recover the encoded account name
            account_key = key[root_len:].lstrip("/")
            accounts.append(_encoded_to_string(account_key))
        except Exception as e:
            from Acquire.Accounting import AccountError
            raise AccountError(
                "Unable to identify the account associated with key "
                "'%s', equals '%s': %s" % (key, account_key, str(e)))

    return accounts
def test_objstore(bucket):
    """Exercise string/JSON round-trips, prefix listing, bucket
    creation and re-fetching of buckets.
    """
    expected = []

    msg = "ƒƒƒ Hello World ∂∂∂"
    ObjectStore.set_string_object(bucket, "test", msg)
    expected.append("test")
    assert ObjectStore.get_string_object(bucket, "test") == msg

    msg = "€€#¢∞ Hello ˚ƒ´πµçµΩ"
    ObjectStore.set_string_object(bucket, "test/something", msg)
    expected.append("test/something")
    assert ObjectStore.get_string_object(bucket, "test/something") == msg

    payload = {"cat": "mieow",
               "dog": "woof",
               "sounds": [1, 2, 3, 4, 5],
               "flag": True}

    ObjectStore.set_object_from_json(bucket, "test/object", payload)
    expected.append("test/object")
    assert ObjectStore.get_object_from_json(bucket, "test/object") == payload

    # listing with progressively longer prefixes narrows the results
    assert len(ObjectStore.get_all_object_names(bucket)) == len(expected)
    assert len(ObjectStore.get_all_object_names(bucket, "test")) == 3
    assert len(ObjectStore.get_all_object_names(bucket, "test/")) == 2

    names = ObjectStore.get_all_object_names(bucket, "test/some")
    assert len(names) == 1
    for name in names:
        assert name in expected

    new_bucket = ObjectStore.create_bucket(bucket, "new_bucket")
    ObjectStore.set_object_from_json(new_bucket, "test/object2", payload)
    assert ObjectStore.get_object_from_json(new_bucket,
                                            "test/object2") == payload

    # creating a bucket that already exists must fail
    with pytest.raises(ObjectStoreError):
        ObjectStore.create_bucket(bucket, "testing_objstore")

    with pytest.raises(ObjectStoreError):
        ObjectStore.create_bucket(bucket, "new_bucket")

    # fetching a missing bucket without create_if_needed must fail
    with pytest.raises(ObjectStoreError):
        ObjectStore.get_bucket(bucket, "get_bucket",
                               create_if_needed=False)

    new_bucket = ObjectStore.get_bucket(bucket, "get_bucket",
                                        create_if_needed=True)

    test_key = "test_string"
    test_value = "test_string_value"
    ObjectStore.set_string_object(new_bucket, test_key, test_value)

    new_bucket2 = ObjectStore.get_bucket(bucket, "get_bucket",
                                         create_if_needed=False)
    assert ObjectStore.get_string_object(new_bucket2, test_key) == test_value
def _reconcile_daily_accounts(self, bucket=None):
    """Internal function used to reconcile the daily accounts.

    This ensures that every line item transaction is summed up so that
    the starting balance for each day is recorded into the object
    store.

    Args:
        bucket (default=None): bucket holding the account data; the
            service account bucket is used if this is None
    Returns:
        None
    Raises:
        AccountError: if no daily balance has ever been recorded for
            this account, or if a balance key unexpectedly has no data
    """
    if self.is_null():
        return

    if bucket is None:
        bucket = _login_to_service_account()

    # work back from today to the first day of the account to calculate
    # all of the daily balances... We need to record every day of the
    # account to support quick lookups
    today = _datetime.datetime.now().toordinal()
    day = today
    last_data = None
    num_missing_days = 0

    # walk backwards day-by-day until we find the most recent day that
    # has a recorded balance (capped at 100 lookups)
    while last_data is None:
        daytime = _datetime.datetime.fromordinal(day)
        key = self._get_balance_key(daytime)
        last_data = _ObjectStore.get_object_from_json(bucket, key)

        if last_data is None:
            day -= 1
            num_missing_days += 1

            if num_missing_days > 100:
                # we need another strategy to find the last balance
                break

    if last_data is None:
        # find the latest day by reading the keys in the object
        # store directly
        root = "%s/balance/" % self._key()
        keys = _ObjectStore.get_all_object_names(bucket, root)

        if keys is None or len(keys) == 0:
            raise AccountError("There is no daily balance recorded for "
                               "the account with UID %s" % self.uid())

        # the encoding of the keys is such that, when sorted, the
        # last key must be the latest balance
        keys.sort()

        last_data = _ObjectStore.get_object_from_json(
            bucket, "%s%s" % (root, keys[-1]))
        day = _get_day_from_key(keys[-1]).toordinal()

        if last_data is None:
            raise AccountError("How can there be no data for key %s?"
                               % keys[-1])

    # what was the balance on the last day?
    # result is a (balance, liability, receivable) triple of Decimals
    result = (_create_decimal(last_data["balance"]),
              _create_decimal(last_data["liability"]),
              _create_decimal(last_data["receivable"]))

    # ok, now we go from the last day until today and sum up the
    # line items from each day to create the daily balances
    # (not including today, as we only want the balance at the beginning
    # of today)
    for d in range(day + 1, today + 1):
        day_time = _datetime.datetime.fromordinal(d)

        # sum the transactions of the *previous* day to get the balance
        # at the start of day 'd'
        transaction_keys = self._get_transaction_keys_between(
            _datetime.datetime.fromordinal(d - 1), day_time)

        total = _sum_transactions(transaction_keys)

        result = (result[0] + total[0],
                  result[1] + total[1],
                  result[2] + total[2])

        balance_key = self._get_balance_key(day_time)

        # balances are stored as strings to avoid Decimal precision
        # loss in JSON
        data = {}
        data["balance"] = str(result[0])
        data["liability"] = str(result[1])
        data["receivable"] = str(result[2])

        _ObjectStore.set_object_from_json(bucket, balance_key, data)
def register_service(self, service, force_new_uid=False):
    """Register the passed service.

    The service must still be in its "STAGE1" (pre-registration) state.
    The function enforces per-domain quotas on pending and active
    services, challenges the service to prove it controls its keys,
    and then records the service (keyed both by UID and by canonical
    URL) in the object store as "pending".

    Args:
        service (Service): the service to register
        force_new_uid (bool, default=False): if True, always generate a
            fresh UID even when a service is already registered at the
            same URL
    Returns:
        str: the UID assigned to the registered service
    Raises:
        TypeError: if 'service' is not a Service object
        PermissionError: if the service is already registered or a
            domain quota would be exceeded
    """
    from Acquire.Service import Service as _Service
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    if not isinstance(service, _Service):
        raise TypeError("You can only register Service objects")

    if service.uid() != "STAGE1":
        raise PermissionError("You cannot register a service twice!")

    # first, stop a single domain monopolising resources...
    bucket = self.get_bucket()

    domain = self._get_domain(service.service_url())
    domainroot = self._get_root_key_for_domain(domain=domain)

    try:
        pending_keys = _ObjectStore.get_all_object_names(
            bucket=bucket, prefix="%s/pending/" % domainroot)
        num_pending = len(pending_keys)
    except:
        # no keys under this prefix means no pending services
        num_pending = 0

    if num_pending >= 4:
        raise PermissionError(
            "You cannot register a new service as you have reached "
            "the quota (4) for the number of pending services registered "
            "against the domain '%s'. Please get some of these services "
            "so that you can make them active." % domain)

    try:
        active_keys = _ObjectStore.get_all_object_names(
            bucket=bucket, prefix="%s/active/" % domainroot)
        num_active = len(active_keys)
    except:
        num_active = 0

    if num_active + num_pending >= 16:
        raise PermissionError(
            "You cannot register a new service as you have reached "
            "the quota (16) for the number registered against the "
            "domain '%s'" % domain)

    # first, challenge the service to ensure that it exists
    # and our keys are correct
    service = self.challenge_service(service)

    if service.uid() != "STAGE1":
        raise PermissionError("You cannot register a service twice!")

    bucket = self.get_bucket()
    urlkey = self._get_key_for_url(service.canonical_url())

    try:
        uidkey = _ObjectStore.get_string_object(bucket=bucket,
                                                key=urlkey)
    except:
        # no existing registration at this URL
        uidkey = None

    service_uid = None

    if uidkey is not None:
        # there is already a service registered at this domain. Since
        # we have successfully challenged the service, this must be
        # someone re-bootstrapping a service. It is safe to give them
        # back their UID if requested
        if not force_new_uid:
            service_uid = self._get_uid_from_key(uidkey)

    if service_uid is None:
        # no reusable UID - mint a fresh one for this registry
        service_uid = _generate_service_uid(
            bucket=self.get_bucket(),
            registry_uid=self.registry_uid())

    # save this service to the object store
    uidkey = self._get_key_for_uid(service_uid)

    _ObjectStore.set_object_from_json(bucket=bucket, key=uidkey,
                                      data=service.to_data())

    # map the canonical URL back to the UID key for later lookups
    _ObjectStore.set_string_object(bucket=bucket, key=urlkey,
                                   string_data=uidkey)

    domainkey = self._get_root_key_for_domain(domain=domain)

    # record the new service as pending against its domain
    _ObjectStore.set_string_object(
        bucket=bucket,
        key="%s/pending/%s" % (domainkey, service_uid),
        string_data=uidkey)

    return service_uid
def run(args):
    """This function is called by the user to log in and validate that
    a session is authorised to connect.

    Expects in 'args': 'short_uid', 'username', 'password', 'otpcode',
    and optionally 'remember_device' and 'device_uid'. It locates the
    pending login request matching the short UID, validates the
    password and one-time code (guarding against OTP reuse), approves
    the session and returns a status/message dictionary, plus the
    device provisioning URI when a new device is being remembered.

    Raises:
        LoginError: if the request cannot be found, the account does
            not exist, the device is unknown, the credentials are
            wrong, or the OTP code was recently reused
    """
    status = 0
    message = None
    provisioning_uri = None
    assigned_device_uid = None

    short_uid = args["short_uid"]
    username = args["username"]
    password = args["password"]
    otpcode = args["otpcode"]

    # optional arguments default to "not remembering this device"
    try:
        remember_device = args["remember_device"]
    except:
        remember_device = False

    try:
        device_uid = args["device_uid"]
    except:
        device_uid = None

    # create the user account for the user
    user_account = UserAccount(username)

    # log into the central identity account to query
    # the current status of this login session
    bucket = login_to_service_account()

    # locate the session referred to by this uid
    base_key = "requests/%s" % short_uid
    session_keys = ObjectStore.get_all_object_names(bucket, base_key)

    # try all of the sessions to find the one that the user
    # may be referring to...
    login_session_key = None
    request_session_key = None

    for session_key in session_keys:
        request_session_key = "%s/%s" % (base_key, session_key)
        session_user = ObjectStore.get_string_object(
            bucket, request_session_key)

        # did the right user request this session?
        if user_account.name() == session_user:
            if login_session_key:
                # this is an extremely unlikely edge case, whereby
                # two login requests within a 30 minute interval for the
                # same user result in the same short UID. This should be
                # signified as an error and the user asked to create a
                # new request
                raise LoginError(
                    "You have found an extremely rare edge-case "
                    "whereby two different login requests have randomly "
                    "obtained the same short UID. As we can't work out "
                    "which request is valid, the login is denied. Please "
                    "create a new login request, which will then have a "
                    "new login request UID")
            else:
                login_session_key = session_key

    if not login_session_key:
        raise LoginError(
            "There is no active login request with the "
            "short UID '%s' for user '%s'" % (short_uid, username))

    login_session_key = "sessions/%s/%s" % (user_account.sanitised_name(),
                                            login_session_key)

    # fully load the user account from the object store so that we
    # can validate the username and password
    try:
        account_key = "accounts/%s" % user_account.sanitised_name()
        user_account = UserAccount.from_data(
            ObjectStore.get_object_from_json(bucket, account_key))
    except:
        raise LoginError("No account available with username '%s'" %
                         username)

    if (not remember_device) and device_uid:
        # see if this device has been seen before
        device_key = "devices/%s/%s" % (user_account.sanitised_name(),
                                        device_uid)

        try:
            device_secret = ObjectStore.get_string_object(bucket,
                                                          device_key)
        except:
            device_secret = None

        if device_secret is None:
            raise LoginError(
                "The login device is not recognised. Please try to "
                "log in again using your master one-time-password.")
    else:
        device_secret = None

    # now try to log into this account using the supplied
    # password and one-time-code
    try:
        if device_secret:
            # validate against the per-device OTP secret
            user_account.validate_password(password, otpcode,
                                           device_secret=device_secret)
        elif remember_device:
            # validation also provisions a new device secret + URI
            (device_secret, provisioning_uri) = \
                user_account.validate_password(
                    password, otpcode, remember_device=True)

            device_uid = str(uuid.uuid4())
            device_key = "devices/%s/%s" % (user_account.sanitised_name(),
                                            device_uid)
            assigned_device_uid = device_uid
        else:
            user_account.validate_password(password, otpcode)
    except:
        # don't leak info about why validation failed
        raise LoginError("The password or OTP code is incorrect")

    # the user is valid - load up the actual login session
    login_session = LoginSession.from_data(
        ObjectStore.get_object_from_json(bucket, login_session_key))

    # we must record the session against which this otpcode has
    # been validated. This is to stop us validating an otpcode more than
    # once (e.g. if the password and code have been intercepted).
    # Any sessions validated using the same code should be treated
    # as immediately suspcious
    otproot = "otps/%s" % user_account.sanitised_name()
    sessions = ObjectStore.get_all_strings(bucket, otproot)

    utcnow = datetime.datetime.utcnow()

    for session in sessions:
        otpkey = "%s/%s" % (otproot, session)
        otpstring = ObjectStore.get_string_object(bucket, otpkey)
        # records are stored as "timestamp|||code"
        (timestamp, code) = otpstring.split("|||")

        # remove all codes that are more than 10 minutes old. The
        # otp codes are only valid for 3 minutes, so no need to record
        # codes that have been used that are older than that...
        timedelta = utcnow - datetime.datetime.fromtimestamp(
            float(timestamp))

        if timedelta.seconds > 600:
            try:
                ObjectStore.delete_object(bucket, otpkey)
            except:
                # best-effort cleanup of stale records
                pass
        elif code == str(otpcode):
            # Low probability there is some recycling,
            # but very suspicious if the code was validated within the last
            # 10 minutes... (as 3 minute timeout of a code)
            suspect_key = "sessions/%s/%s" % (
                user_account.sanitised_name(), session)

            suspect_session = None

            try:
                suspect_session = LoginSession.from_data(
                    ObjectStore.get_object_from_json(bucket,
                                                     suspect_key))
            except:
                pass

            if suspect_session:
                suspect_session.set_suspicious()
                ObjectStore.set_object_from_json(bucket, suspect_key,
                                                 suspect_session.to_data())

            raise LoginError(
                "Cannot authorise the login as the one-time-code "
                "you supplied has already been used within the last 10 "
                "minutes. The chance of this happening is really low, so "
                "we are treating this as a suspicious event. You need to "
                "try another code. Meanwhile, the other login that used "
                "this code has been put into a 'suspicious' state.")

    # record the value and timestamp of when this otpcode was used
    otpkey = "%s/%s" % (otproot, login_session.uuid())
    otpstring = "%s|||%s" % (datetime.datetime.utcnow().timestamp(),
                             otpcode)

    ObjectStore.set_string_object(bucket, otpkey, otpstring)

    login_session.set_approved()

    # write this session back to the object store
    ObjectStore.set_object_from_json(bucket, login_session_key,
                                     login_session.to_data())

    # save the device secret as everything has now worked
    if assigned_device_uid:
        ObjectStore.set_string_object(bucket, device_key,
                                      device_secret)

    # finally, remove this from the list of requested logins
    try:
        ObjectStore.delete_object(bucket, request_session_key)
    except:
        pass

    status = 0
    message = "Success: Status = %s" % login_session.status()

    return_value = create_return_value(status, message)

    if provisioning_uri:
        return_value["provisioning_uri"] = provisioning_uri
        return_value["device_uid"] = assigned_device_uid

    return return_value
def list_files(self, authorisation=None, par=None, identifiers=None,
               include_metadata=False, dir=None, filename=None):
    """Return the list of FileMeta data for the files contained
    in this Drive. The passed authorisation is needed in case
    the list contents of this drive is not public.

    If 'dir' is specified, then only search for files in 'dir'.
    If 'filename' is specified, then only search for the
    file called 'filename'

    Args:
        authorisation (default=None): authorisation used to resolve
            the access control list for this drive
        par (default=None): a PAR that may restrict the listing to a
            single file or this drive
        identifiers (default=None): identifiers resolved alongside the
            ACL and passed through to FileInfo.from_data
        include_metadata (bool, default=False): if True, load each
            file's full metadata and filter by per-file ACL
        dir (str, default=None): only list files under this directory
        filename (str, default=None): only list this one file
    Returns:
        list: list of FileMeta objects
    Raises:
        PermissionError: if the caller may not read this drive
    """
    (drive_acl, identifiers) = self._resolve_acl(
        authorisation=authorisation,
        resource="list_files", par=par,
        identifiers=identifiers)

    if par is not None:
        # a file-scoped PAR narrows the listing to exactly that file
        if par.location().is_file():
            dir = None
            filename = par.location().filename()
        elif not par.location().is_drive():
            raise PermissionError(
                "You do not have permission to read the Drive")

    if not drive_acl.is_readable():
        raise PermissionError(
            "You don't have permission to read this Drive")

    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import encoded_to_string as _encoded_to_string
    from Acquire.ObjectStore import string_to_encoded as _string_to_encoded
    from Acquire.Storage import FileMeta as _FileMeta

    metadata_bucket = self._get_metadata_bucket()

    if filename is not None:
        # exact lookup: build the single key for this (dir/)filename
        if dir is not None:
            filename = "%s/%s" % (dir, filename)

        key = "%s/%s/%s" % (_fileinfo_root, self._drive_uid,
                            _string_to_encoded(filename))

        names = [key]
    elif dir is not None:
        # prefix lookup on the encoded directory name
        while dir.endswith("/"):
            dir = dir[0:-1]

        encoded_dir = _string_to_encoded(dir)

        while encoded_dir.endswith("="):
            encoded_dir = encoded_dir[0:-1]

        # remove the last two characters, as sometime uuencoding
        # will change the last characters so they don't match
        if len(encoded_dir) > 2:
            encoded_dir = encoded_dir[0:-2]
        else:
            encoded_dir = ""

        key = "%s/%s/%s" % (_fileinfo_root, self._drive_uid,
                            encoded_dir)

        all_names = _ObjectStore.get_all_object_names(metadata_bucket,
                                                      key)

        # the truncated prefix may over-match, so re-filter by the
        # decoded filename actually starting with 'dir/'
        names = []

        dir = "%s/" % dir

        for name in all_names:
            decoded_name = _encoded_to_string(name.split("/")[-1])

            if decoded_name.startswith(dir):
                names.append(name)
    else:
        # no filter: list every file key under this drive
        key = "%s/%s" % (_fileinfo_root, self._drive_uid)
        names = _ObjectStore.get_all_object_names(metadata_bucket,
                                                  key)

    files = []

    if include_metadata:
        # we need to load all of the metadata info for this file to
        # return to the user
        from Acquire.Storage import FileInfo as _FileInfo

        for name in names:
            try:
                data = _ObjectStore.get_object_from_json(
                    metadata_bucket, name)
                fileinfo = _FileInfo.from_data(data,
                                               identifiers=identifiers,
                                               upstream=drive_acl)
                filemeta = fileinfo.get_filemeta()
                file_acl = filemeta.acl()

                # only report files this caller can actually access
                if file_acl.is_readable() or file_acl.is_writeable():
                    files.append(filemeta)
            except:
                pass
    else:
        for name in names:
            filename = _encoded_to_string(name.split("/")[-1])
            files.append(_FileMeta(filename=filename))

    return files
def run(args):
    """This function will allow a user to request a new session
    that will be validated by the passed public key and public
    signing certificate. This will return a URL that the user
    must connect to to then log in and validate that request.

    Expects in 'args': 'username', 'public_key',
    'public_certificate', and optionally 'ipaddr', 'hostname' and
    'message'.

    Returns:
        dict: status/message plus 'session_uid', 'login_url' and
            'user_uid' entries
    Raises:
        InvalidLoginError: if no account exists for the username
    """
    status = 0
    message = None
    login_url = None
    login_uid = None
    user_uid = None

    username = args["username"]
    public_key = PublicKey.from_data(args["public_key"])
    public_cert = PublicKey.from_data(args["public_certificate"])

    # optional request metadata - missing keys are simply ignored
    ip_addr = None
    hostname = None
    login_message = None

    try:
        ip_addr = args["ipaddr"]
    except:
        pass

    try:
        hostname = args["hostname"]
    except:
        pass

    try:
        login_message = args["message"]
    except:
        pass

    # generate a sanitised version of the username
    user_account = UserAccount(username)

    # Now generate a login session for this request
    login_session = LoginSession(public_key, public_cert, ip_addr,
                                 hostname, login_message)

    # now log into the central identity account to record
    # that a request to open a login session has been opened
    bucket = login_to_service_account()

    # first, make sure that the user exists...
    account_key = "accounts/%s" % user_account.sanitised_name()

    try:
        existing_data = ObjectStore.get_object_from_json(bucket,
                                                         account_key)
    except:
        existing_data = None

    if existing_data is None:
        raise InvalidLoginError("There is no user with name '%s'" %
                                username)

    user_account = UserAccount.from_data(existing_data)
    user_uid = user_account.uid()

    # first, make sure that the user doens't have too many open
    # login sessions at once - this prevents denial of service
    user_session_root = "sessions/%s" % user_account.sanitised_name()

    open_sessions = ObjectStore.get_all_object_names(bucket,
                                                     user_session_root)

    # take the opportunity to prune old user login sessions
    prune_expired_sessions(bucket, user_account,
                           user_session_root, open_sessions)

    # this is the key for the session in the object store
    user_session_key = "%s/%s" % (user_session_root,
                                  login_session.uuid())

    ObjectStore.set_object_from_json(bucket,
                                     user_session_key,
                                     login_session.to_data())

    # we will record a pointer to the request using the short
    # UUID. This way we can give a simple URL. If there is a clash,
    # then we will use the username provided at login to find the
    # correct request from a much smaller pool (likely < 3)
    request_key = "requests/%s/%s" % (login_session.short_uuid(),
                                      login_session.uuid())

    ObjectStore.set_string_object(bucket, request_key,
                                  user_account.name())

    status = 0

    # the login URL is the URL of this identity service plus the
    # short UID of the session
    login_url = "%s/s?id=%s" % (get_service_info().service_url(),
                                login_session.short_uuid())

    login_uid = login_session.uuid()

    message = "Success: Login via %s" % login_url

    return_value = create_return_value(status, message)

    if login_uid:
        return_value["session_uid"] = login_uid

    if login_url:
        return_value["login_url"] = login_url
    else:
        return_value["login_url"] = None

    if user_uid:
        return_value["user_uid"] = user_uid

    return return_value