Example #1
def run(args):
    """This function will allow anyone to query the current login
       status of the session with passed UID"""

    status = 0
    message = None
    session_status = None

    session_uid = args["session_uid"]
    username = args["username"]

    # generate a sanitised version of the username
    user_account = UserAccount(username)

    # now log into the central identity account to query
    # the current status of this login session
    bucket = login_to_service_account()

    user_session_key = "sessions/%s/%s" % \
        (user_account.sanitised_name(), session_uid)

    try:
        login_session = LoginSession.from_data(
            ObjectStore.get_object_from_json(bucket, user_session_key))
    except:
        login_session = None

    if login_session is None:
        user_session_key = "expired_sessions/%s/%s" % \
                                (user_account.sanitised_name(),
                                    session_uid)

        login_session = LoginSession.from_data(
            ObjectStore.get_object_from_json(bucket, user_session_key))

    if login_session is None:
        raise InvalidSessionError("Cannot find the session '%s'" % session_uid)

    status = 0
    message = "Success: Status = %s" % login_session.status()
    session_status = login_session.status()

    return_value = create_return_value(status, message)

    if session_status:
        return_value["session_status"] = session_status

    return return_value
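
As a rough illustration of the call contract implied by the code above (this is not taken from the Acquire documentation), the handler expects an args payload carrying a session UID and username, and returns the dictionary built by create_return_value plus an optional session_status entry:

# Hypothetical request payload - only the key names read by run() above are
# real; the values are invented for illustration.
args = {"session_uid": "some-session-uid",
        "username": "alice"}

# result = run(args)
# result is whatever create_return_value(status, message) produces, with
# result["session_status"] added when the session was found.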
Example #2
    def get_cluster():
        """Return a handle to the single compute cluster that is
           connected to this compute service
        """
        if not Cluster._is_running_service():
            raise PermissionError(
                "You can only call 'get_cluster' on the compute service")

        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "compute/cluster"

        try:
            data = _ObjectStore.get_object_from_json(bucket, key)
        except:
            data = None

        if data is None:
            from Acquire.Service import ServiceError
            raise ServiceError(
                "You have not set the cluster that will be used to actually "
                "run the compute jobs!")

        return Cluster.from_data(data)
Example #3
def test_objstore(bucket):
    keys = []

    message = "ƒƒƒ Hello World ∂∂∂"

    ObjectStore.set_string_object(bucket, "test", message)
    keys.append("test")

    assert (message == ObjectStore.get_string_object(bucket, "test"))

    message = "€€#¢∞ Hello ˚ƒ´πµçµΩ"

    ObjectStore.set_string_object(bucket, "test/something", message)
    keys.append("test/something")

    assert (message == ObjectStore.get_string_object(bucket, "test/something"))

    data = {
        "cat": "mieow",
        "dog": "woof",
        "sounds": [1, 2, 3, 4, 5],
        "flag": True
    }

    ObjectStore.set_object_from_json(bucket, "test/object", data)
    keys.append("test/object")

    assert (data == ObjectStore.get_object_from_json(bucket, "test/object"))

    names = ObjectStore.get_all_object_names(bucket)

    assert (len(names) == len(keys))

    for name in names:
        assert (name in keys)
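
The test above exercises only a handful of ObjectStore class methods. As a minimal sketch of the semantics those calls appear to assume (an in-memory stand-in, not the real Acquire implementation), something like the following would satisfy the assertions:

import json

class MemoryObjectStore:
    """A dict-backed stand-in mimicking the ObjectStore calls used above."""

    @staticmethod
    def set_string_object(bucket, key, value):
        bucket[key] = value

    @staticmethod
    def get_string_object(bucket, key):
        return bucket[key]

    @staticmethod
    def set_object_from_json(bucket, key, data):
        bucket[key] = json.dumps(data)

    @staticmethod
    def get_object_from_json(bucket, key):
        return json.loads(bucket[key])

    @staticmethod
    def get_all_object_names(bucket, prefix=None):
        if prefix is None:
            return list(bucket.keys())
        return [k for k in bucket.keys() if k.startswith(prefix)]

# e.g. the test above would pass against an empty dict used as the bucket,
# with ObjectStore replaced by MemoryObjectStore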
Example #4
    def load(self, par_uid, secret=None):
        """Load and return the PAR and identifiers associated with
           the passed UID, locked with the passed secret
        """
        # validate that the UID actually looks like a UID. This
        # should prevent attacks that try weird UIDs
        from Acquire.ObjectStore import validate_is_uid \
            as _validate_is_uid
        _validate_is_uid(par_uid)

        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        try:
            key = "%s/%s" % (_par_root, par_uid)
            bucket = _get_service_account_bucket()
            data = _ObjectStore.get_object_from_json(bucket, key)

            from Acquire.Client import PAR as _PAR
            import json as _json
            par = _PAR.from_data(data["par"])
            identifiers = _json.loads(data["identifiers"])

            if secret != data["secret"]:
                raise PermissionError()
        except:
            raise PermissionError("There is no valid PAR at ID '%s'" % par_uid)

        if par.expired():
            raise PermissionError(
                "There is no valid PAR at ID '%s' as it has expired" % par_uid)

        return (par, identifiers)
Example #5
    def load_transaction(uid, bucket=None):
        """Load the transactionrecord with UID=uid from the ledger

           Args:
                uid (str): UID of transaction to load
                bucket (dict, default=None): Bucket to load data from
           Returns:
                TransactionRecord: Transaction with that UID

        """
        if bucket is None:
            from Acquire.Service import get_service_account_bucket \
                as _get_service_account_bucket
            bucket = _get_service_account_bucket()

        from Acquire.Accounting import TransactionRecord as _TransactionRecord
        from Acquire.ObjectStore import ObjectStore as _ObjectStore

        data = _ObjectStore.get_object_from_json(bucket, Ledger.get_key(uid))

        if data is None:
            from Acquire.Accounting import LedgerError
            raise LedgerError("There is no transaction recorded in the "
                              "ledger with UID=%s (at key %s)" %
                              (uid, Ledger.get_key(uid)))

        return _TransactionRecord.from_data(data)
Example #6
    def close_downloader(self, downloader_uid, file_uid, secret):
        """Close the downloader associated with the passed
           downloader_uid and file_uid,
           authenticated using the passed secret
        """
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "%s/%s/%s/%s" % (_downloader_root, self._drive_uid, file_uid,
                               downloader_uid)

        try:
            data = _ObjectStore.get_object_from_json(bucket, key)
        except:
            data = None

        if data is None:
            # the downloader has already been closed
            return

        shared_secret = data["secret"]

        if secret != shared_secret:
            raise PermissionError(
                "Invalid request - you do not have permission to "
                "close this downloader")

        try:
            _ObjectStore.take_object_from_json(bucket, key)
        except:
            pass
Example #7
def _generate_service_uid(bucket, registry_uid):
    """Function to generate a new service_uid on this registry.

       The UIDs have the form a0-a0, where "a" is any letter from [a-zA-Z]
       and "0" is any number from [0-9]. This gives 520 possible values
       for each part either side of the hyphen.

       The part on the left of the hyphen is the root UID, which
       matches the root of the service_uid of the registry service
       that registered this service (the service_uid of a registry
       service has the UID root-root).

       If more than 520 values are needed, then either side of the
       ID can be extended by additional pairs of a0 digits, using
       a "." to separate pairs, e.g.

       the service_uid for registry b4-b4 that comes after
       b4-Z9.Z9.Z9 is b4-a0.a0.a0.a0

       similarly, the registry after Z9 is A0-A0.

       This means that

       a0.a0-a0.a0.a0.a0

       would be a perfectly valid ID. We would only need IDs of this
       length if we have ~270k registry services, and this service_uid
       came from a service that had registered ~73 billion services...

       The registry root Z9, with registry Z9-Z9 is reserved for
       the temporary registry created during testing
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex

    root = registry_uid.split("-")[0]

    key = "%s/last_service_uid" % _registry_key

    mutex = _Mutex(key=key)

    try:
        last_vals = _ObjectStore.get_object_from_json(bucket=bucket,
                                                      key=key)
        last_vals = _inc_uid(last_vals)
    except:
        last_vals = [0, 0]

    service_uid = "%s-%s" % (root, _to_uid(last_vals))

    while service_uid == registry_uid:
        last_vals = _inc_uid(last_vals)
        service_uid = "%s-%s" % (root, _to_uid(last_vals))

    _ObjectStore.set_object_from_json(bucket=bucket, key=key, data=last_vals)
    mutex.unlock()

    return service_uid
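
The helpers _inc_uid and _to_uid are not shown above. Purely as a reconstruction from the docstring (the real helpers may differ), they could treat last_vals as a flat list of letter/digit indices, counting in base 520 per pair and growing by an extra "a0" pair once every pair has wrapped past Z9:

import string

_letters = string.ascii_lowercase + string.ascii_uppercase   # 52 letters

def _to_uid(vals):
    """Render [letter_idx, digit, letter_idx, digit, ...] as 'a0.b3...'."""
    pairs = ["%s%d" % (_letters[vals[i]], vals[i + 1])
             for i in range(0, len(vals), 2)]
    return ".".join(pairs)

def _inc_uid(vals):
    """Add one, carrying digits into letters; grow by an 'a0' pair on overflow."""
    vals = list(vals)
    for i in reversed(range(len(vals))):
        limit = 10 if i % 2 else 52     # odd indices are digits, even are letters
        vals[i] += 1
        if vals[i] < limit:
            return vals
        vals[i] = 0
    return [0, 0] + vals                # e.g. Z9 -> a0.a0, Z9.Z9.Z9 -> a0.a0.a0.a0

# _to_uid([0, 0]) == "a0";  _to_uid(_inc_uid([51, 9])) == "a0.a0"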
Example #8
    def _get_daily_balance(self, bucket=None, datetime=None):
        """Get the daily starting balance for the passed datetime. This
           returns a tuple of
           (balance, liability, receivable).

           where 'balance' is the current real balance of the account,
           neglecting any outstanding liabilities or accounts receivable,
           where 'liability' is the current total liabilities, and
           where 'receivable' is the current total accounts receivable.

           If datetime is None then today's daily balance is returned. The
           daily balance is the balance at the start of the day. The
           actual balance at a particular time will be this starting
           balance plus/minus all of the transactions between the start
           of that day and the specified datetime
        """
        if self.is_null():
            return

        if bucket is None:
            bucket = _login_to_service_account()

        if datetime is None:
            datetime = _datetime.datetime.now()

        balance_key = self._get_balance_key(datetime)

        data = _ObjectStore.get_object_from_json(bucket, balance_key)

        if data is None:
            # there is no balance for this day. This means that we haven't
            # yet calculated that day's balance. Do the accounting necessary
            # to construct that day's starting balance
            self._reconcile_daily_accounts(bucket)

            data = _ObjectStore.get_object_from_json(bucket, balance_key)

            if data is None:
                raise AccountError("The daily balance for account at date %s "
                                   "is not available" % str(datetime))

        return (_create_decimal(data["balance"]),
                _create_decimal(data["liability"]),
                _create_decimal(data["receivable"]))
Example #9
    def _load_account(self, bucket=None):
        """Load the current state of the account from the object store"""
        if self.is_null():
            return

        if bucket is None:
            bucket = _login_to_service_account()

        data = _ObjectStore.get_object_from_json(bucket, self._key())
        self.__dict__ = _copy(Account.from_data(data).__dict__)
Example #10
def run(args):
    """This function is used to request access to a bucket for
       data in the object store. The user can request read-only
       or read-write access. Access is granted based on a permission
       list
    """

    status = 0
    message = None

    access_token = None

    user_uuid = args["user_uuid"]
    identity_service_url = args["identity_service"]

    # log into the central access account
    bucket = login_to_service_account()

    # is the identity service supplied by the user one that we trust?
    identity_service = Service.from_data(
        ObjectStore.get_object_from_json(bucket,
                                         "services/%s" % identity_service_url))

    if not identity_service:
        raise RequestBucketError(
            "You cannot request a bucket because "
            "this access service does not know or trust your supplied "
            "identity service (%s)" % identity_service_url)

    if not identity_service.is_identity_service():
        raise RequestBucketError(
            "You cannot request a bucket because "
            "the passed service (%s) is not an identity service. It is "
            "a %s" % (identity_service_url, identity_service.service_type()))

    # Since we trust this identity service, we can ask it to give us the
    # public certificate and signing certificate for this user.
    key = PrivateKey()

    response = call_function(identity_service_url,
                             "get_user_keys",
                             args_key=identity_service.public_key(),
                             response_key=key,
                             user_uuid=user_uuid)

    status = 0
    message = "Success: Status = %s" % str(response)

    return_value = create_return_value(status, message)

    if access_token:
        return_value["access_token"] = access_token

    return return_value
Example #11
def get_trusted_service_info(service_url):
    """Return the trusted service info for 'service_url'"""
    bucket = _login_to_service_account()
    data = _ObjectStore.get_object_from_json(
                            bucket,
                            "services/%s" % url_to_encoded(service_url))

    if data is None:
        raise ServiceAccountError("We do not trust the service at '%s'" %
                                  service_url)

    return _Service.from_data(data)
Example #12
    def load_transaction(uid, bucket=None):
        """Load the transactionrecord with UID=uid from the ledger"""
        if bucket is None:
            bucket = _login_to_service_account()

        data = _ObjectStore.get_object_from_json(bucket, Ledger.get_key(uid))

        if data is None:
            raise LedgerError("There is no transaction recorded in the "
                              "ledger with UID=%s (at key %s)" %
                              (uid, Ledger.get_key(uid)))

        return _TransactionRecord.from_data(data)
Example #13
    def close_uploader(self, file_uid, secret):
        """Close the uploader associated with the passed file_uid,
           authenticated using the passed secret
        """
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)

        try:
            data = _ObjectStore.get_object_from_json(bucket, key)
        except:
            data = None

        if data is None:
            # the uploader has already been closed
            return

        shared_secret = data["secret"]

        if secret != shared_secret:
            raise PermissionError(
                "Invalid request - you do not have permission to "
                "close this uploader")

        try:
            data2 = _ObjectStore.take_object_from_json(bucket, key)
        except:
            data2 = None

        if data2 is None:
            # someone else is already in the process of closing
            # this uploader - let them do it!
            return

        filename = data["filename"]
        version = data["version"]

        # now get the FileInfo for this file
        from Acquire.Storage import FileInfo as _FileInfo
        fileinfo = _FileInfo.load(drive=self,
                                  filename=filename,
                                  version=version)

        file_key = data["filekey"]
        file_bucket = self._get_file_bucket(file_key)
        fileinfo.close_uploader(file_bucket=file_bucket)
        fileinfo.save()
Example #14
    def upload_chunk(self, file_uid, chunk_index, secret, chunk, checksum):
        """Upload a chunk of the file with UID 'file_uid'. This is the
           chunk at index 'chunk_index', which is set equal to 'chunk'
           (validated with 'checksum'). The passed secret is used to
           authenticate this upload. The secret should be the
           multi_md5 hash of the shared secret with the concatenated
           drive_uid, file_uid and chunk_index
        """
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "%s/%s/%s" % (_uploader_root, self._drive_uid, file_uid)
        data = _ObjectStore.get_object_from_json(bucket, key)
        shared_secret = data["secret"]

        from Acquire.Crypto import Hash as _Hash
        shared_secret = _Hash.multi_md5(
            shared_secret, "%s%s%d" % (self._drive_uid, file_uid, chunk_index))

        if secret != shared_secret:
            raise PermissionError(
                "Invalid chunked upload secret. You do not have permission "
                "to upload chunks to this file!")

        # validate the data checksum
        check = _Hash.md5(chunk)

        if check != checksum:
            from Acquire.Storage import FileValidationError
            raise FileValidationError(
                "Invalid checksum for chunk: %s versus %s" % (check, checksum))

        meta = {
            "filesize": len(chunk),
            "checksum": checksum,
            "compression": "bz2"
        }

        file_key = data["filekey"]
        chunk_index = int(chunk_index)

        file_bucket = self._get_file_bucket(file_key)
        data_key = "%s/data/%d" % (file_key, chunk_index)
        meta_key = "%s/meta/%d" % (file_key, chunk_index)

        _ObjectStore.set_object_from_json(file_bucket, meta_key, meta)
        _ObjectStore.set_object(file_bucket, data_key, chunk)
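
On the sending side, the checksum verified above is presumably the hex MD5 digest of the chunk bytes. A sketch of how a client might prepare a chunk (assuming Hash.md5 corresponds to hashlib's hex digest and that chunks are sent bz2-compressed as the metadata above records; the per-chunk secret still has to come from Acquire's Hash.multi_md5, which is not reproduced here):

import bz2
import hashlib

def prepare_chunk(raw_bytes):
    """Compress a chunk and compute the checksum the server will verify.
       Both the bz2 compression and the hex MD5 digest are assumptions
       based on the metadata and checks in the code above."""
    chunk = bz2.compress(raw_bytes)
    checksum = hashlib.md5(chunk).hexdigest()
    return chunk, checksum

chunk, checksum = prepare_chunk(b"some file data")
# drive.upload_chunk(file_uid, chunk_index, secret, chunk, checksum)  # hypothetical call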
Example #15
def _refresh_this_service_keys_and_certs(service_info, service_password):
    from Acquire.Service import Service as _Service
    service = _Service.from_data(service_info, service_password)

    if service._uid == "STAGE1":
        return service_info

    if not service.should_refresh_keys():
        return service_info

    oldkeys = service.dump_keys(include_old_keys=False)

    # now write the old keys to storage
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket

    bucket = _get_service_account_bucket()
    key = "%s/oldkeys/%s" % (_service_key, oldkeys["datetime"])
    _ObjectStore.set_object_from_json(bucket, key, oldkeys)

    # now write the pointers from fingerprint to file...
    for fingerprint in oldkeys.keys():
        if fingerprint not in ["datetime", "encrypted_passphrase"]:
            _ObjectStore.set_string_object(
                bucket,
                "%s/oldkeys/fingerprints/%s" % (_service_key, fingerprint),
                key)

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    m = _Mutex(key=service.uid(), bucket=bucket)

    service_data = _ObjectStore.get_object_from_json(bucket, _service_key)
    service_info = _Service.from_data(service_data)

    if service_info.last_key_update() == last_update:
        # no-one else has beaten us - write the updated keys to global state
        _ObjectStore.set_object_from_json(bucket, _service_key,
                                          service.to_data(service_password))

    m.unlock()

    return service_data
Example #16
def refresh_service_keys_and_certs(service, force_refresh=False):
    """This function will check if any key rotation is needed, and
       if so, it will automatically refresh the keys and certificates.
       The old keys and certificates will be stored in a database of
       old keys and certificates
    """
    assert_running_service()

    if service._uid == "STAGE1":
        return service

    if (not force_refresh) and (not service.should_refresh_keys()):
        return service

    # ensure that the current keys are saved to the object store
    save_service_keys_to_objstore()

    # generate new keys
    last_update = service.last_key_update()
    service.refresh_keys()

    # now lock the object store so that we are the only function
    # that can write the new keys to global state
    from Acquire.Service import get_service_account_bucket as \
        _get_service_account_bucket
    from Acquire.Service import Service as _Service
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.ObjectStore import ObjectStore as _ObjectStore

    bucket = _get_service_account_bucket()
    m = _Mutex(key=service.uid(), bucket=bucket)

    service_data = _ObjectStore.get_object_from_json(bucket, _service_key)
    service_info = _Service.from_data(service_data)

    if service_info.last_key_update() == last_update:
        # no-one else has beaten us - write the updated keys to global state
        _ObjectStore.set_object_from_json(
            bucket, _service_key, service.to_data(_get_service_password()))

    m.unlock()

    # clear the cache as we will need to load a new object
    clear_serviceinfo_cache()

    return get_this_service(need_private_access=True)
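
The key-rotation code above relies on a compare-then-write step guarded by a Mutex (the comments imply that constructing the Mutex takes the lock and unlock() releases it). One design note: wrapping the critical section in try/finally keeps the lock from leaking if the write fails. A sketch of that pattern, using only the calls that appear above:

def write_keys_under_mutex(bucket, service_key, service, last_update, passphrase):
    """Sketch of the compare-then-write step above, with try/finally added
       so the mutex is always released even if the object store write fails."""
    from Acquire.ObjectStore import Mutex as _Mutex
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import Service as _Service

    m = _Mutex(key=service.uid(), bucket=bucket)   # constructing the Mutex takes the lock
    try:
        service_data = _ObjectStore.get_object_from_json(bucket, service_key)
        if _Service.from_data(service_data).last_key_update() == last_update:
            # no-one else has rotated the keys in the meantime - write ours
            _ObjectStore.set_object_from_json(bucket, service_key,
                                              service.to_data(passphrase))
    finally:
        m.unlock()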
Example #17
def _get_service_info_data():
    """Internal function that loads up the service info data from
       the object store.
    """
    bucket = _login_to_service_account()

    # find the service info from the object store
    service_key = "_service_info"

    service = _ObjectStore.get_object_from_json(bucket, service_key)

    if not service:
        raise MissingServiceAccountError(
            "You haven't yet created the service account "
            "for this service. Please create an account first.")

    return service
Example #18
    def load(uid):
        """Load the ComputeJob associated with the specified uid"""
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()
        key = "compute/job/%s" % uid

        try:
            data = _ObjectStore.get_object_from_json(bucket, key)
        except:
            data = None

        if data is None:
            raise KeyError("There is no job with UID = %s" % uid)

        return ComputeJob.from_data(data)
Example #19
def get_object_from_json(bucket, key):
    """ Removes the daterange from the passed key and uses the reduced
        key to get an object from the object store.

        Wraps the Acquire get_object_from_json function

        Args:
            bucket (dict): Bucket containing data
            key (str): Key for data in bucket
        Returns:
            Object: Object from store
    """
    # Use the key as a prefix to find the single matching object name
    names = ObjectStore.get_all_object_names(bucket, prefix=key)

    if len(names) != 1:
        raise ValueError("There should be exactly one object with this key")

    return ObjectStore.get_object_from_json(bucket, names[0])
Example #20
    def _get_aclrules(self, user_guid, aclrules, bucket=None):
        """Load up the ACLRules for this group. If none are set, then
           either the passed ACLRules will be used, or the specified
           user will be set as the owner
        """
        from Acquire.Identity import ACLRules as _ACLRules
        from Acquire.ObjectStore import ObjectStore as _ObjectStore

        if bucket is None:
            from Acquire.Service import get_service_account_bucket \
                as _get_service_account_bucket
            bucket = _get_service_account_bucket()

        aclkey = self._acls_key()

        try:
            self._aclrules = _ACLRules.from_data(
                _ObjectStore.get_object_from_json(bucket=bucket, key=aclkey))
        except:
            self._aclrules = None

        if self._aclrules is not None:
            return

        if aclrules is None:
            if user_guid is None:
                raise PermissionError(
                    "You must specify the guid of the initial user who "
                    "owns this account!")

            aclrules = _ACLRules.owner(user_guid=user_guid)
        elif not isinstance(aclrules, _ACLRules):
            raise TypeError("The ACLRules must be type ACLRules")

        _ObjectStore.set_object_from_json(bucket=bucket,
                                          key=aclkey,
                                          data=aclrules.to_data())

        self._aclrules = aclrules
Example #21
    def load(uid):
        """Return the WorkSheet with specified uid loaded from the
           ObjectStore
        """
        from Acquire.Service import assert_running_service \
            as _assert_running_service

        _assert_running_service()

        if uid is None:
            return

        from Acquire.Service import get_service_account_bucket \
            as _get_service_account_bucket

        bucket = _get_service_account_bucket()

        from Acquire.ObjectStore import ObjectStore as _ObjectStore

        key = "worksheet/%s" % str(uid)
        data = _ObjectStore.get_object_from_json(bucket, key)
        return WorkSheet.from_data(data)
Example #22
def _get_this_service_data():
    """Internal function that loads up the service info data from
       the object store.
    """
    assert_running_service()

    from Acquire.Service import ServiceAccountError

    # get the bucket again - can't pass as an argument as this is a cached
    # function - luckily _get_service_account_bucket is also a cached function
    try:
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        bucket = _get_service_account_bucket()
    except ServiceAccountError as e:
        raise e
    except Exception as e:
        raise ServiceAccountError("Cannot log into the service account: %s" %
                                  str(e))

    # find the service info from the object store
    try:
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        service = _ObjectStore.get_object_from_json(bucket, _service_key)
    except Exception as e:
        from Acquire.Service import MissingServiceAccountError
        raise MissingServiceAccountError(
            "Unable to load the service account for this service. An "
            "error occured while loading the data from the object "
            "store: %s" % str(e))

    if not service:
        from Acquire.Service import MissingServiceAccountError
        raise MissingServiceAccountError(
            "You haven't yet created the service account "
            "for this service. Please create an account first.")

    return service
Example #23
def get_admin_users():
    """This function returns all of the admin_users data. This is a
       dictionary of the UIDs of all of the admin users
    """
    assert_running_service()

    from Acquire.Service import ServiceAccountError

    try:
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        bucket = _get_service_account_bucket()
    except ServiceAccountError as e:
        raise e
    except Exception as e:
        raise ServiceAccountError("Cannot log into the service account: %s" %
                                  str(e))

    # find the admin accounts info from the object store
    try:
        key = "%s/admin_users" % _service_key
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        admin_users = _ObjectStore.get_object_from_json(bucket, key)
    except Exception as e:
        from Acquire.Service import MissingServiceAccountError
        raise MissingServiceAccountError(
            "Unable to load the Admin User data for this service. An "
            "error occured while loading the data from the object "
            "store: %s" % str(e))

    if not admin_users:
        from Acquire.Service import MissingServiceAccountError
        raise MissingServiceAccountError(
            "You haven't yet created any Admin Users for the service account "
            "for this service. Please create an Admin User first.")

    return admin_users
Example #24
def load_service_key_from_objstore(fingerprint):
    """This function will see if we have an old key with the requested
       fingerprint, and if so, we will try to load and return that
       key from the object store
    """
    from Acquire.ObjectStore import ObjectStore as _ObjectStore
    from Acquire.Service import get_service_account_bucket \
        as _get_service_account_bucket
    from Acquire.Crypto import KeyManipulationError

    bucket = _get_service_account_bucket()

    try:
        key = "%s/oldkeys/fingerprints/%s" % (_service_key, fingerprint)
        keyfile = _ObjectStore.get_string_object(bucket, key)
    except:
        keyfile = None

    if keyfile is None:
        raise KeyManipulationError(
            "Cannot find a key or certificate with fingerprint '%s' : %s" %
            (fingerprint, key))

    try:
        keydata = _ObjectStore.get_object_from_json(bucket, keyfile)
    except Exception as e:
        keydata = None
        error = str(e)

    if keydata is None:
        raise KeyManipulationError(
            "Unable to load the key or certificate with fingerprint '%s': %s" %
            (fingerprint, error))

    service = get_this_service(need_private_access=True)
    return service.load_keys(keydata)[fingerprint]
Example #25
def get_trusted_service(service_url=None,
                        service_uid=None,
                        service_type=None,
                        autofetch=True):
    """Return the trusted service info for the service with specified
       service_url or service_uid"""
    if service_url is not None:
        from Acquire.Service import Service as _Service
        service_url = _Service.get_canonical_url(service_url,
                                                 service_type=service_type)

    from Acquire.Service import is_running_service as _is_running_service

    if _is_running_service():
        from Acquire.Service import get_this_service as _get_this_service
        from Acquire.Service import Service as _Service
        from Acquire.Service import get_service_account_bucket as \
            _get_service_account_bucket
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.ObjectStore import url_to_encoded as \
            _url_to_encoded

        service = _get_this_service()

        if service_url is not None and service.canonical_url() == service_url:
            # we trust ourselves :-)
            return service

        if service_uid is not None and service.uid() == service_uid:
            # we trust ourselves :-)
            return service

        bucket = _get_service_account_bucket()
        uidkey = None
        data = None

        if service_uid is not None:
            uidkey = "_trusted/uid/%s" % service_uid
            try:
                data = _ObjectStore.get_object_from_json(bucket, uidkey)
            except:
                pass
        elif service_url is not None:
            urlkey = "_trusted/url/%s" % _url_to_encoded(service_url)
            try:
                uidkey = _ObjectStore.get_string_object(bucket, urlkey)
                if uidkey is not None:
                    data = _ObjectStore.get_object_from_json(bucket, uidkey)
            except:
                pass

        if data is not None:
            remote_service = _Service.from_data(data)

            if remote_service.should_refresh_keys():
                # need to update the keys in our copy of the service
                remote_service.refresh_keys()

                if uidkey is not None:
                    _ObjectStore.set_object_from_json(bucket, uidkey,
                                                      remote_service.to_data())

            return remote_service

        if not autofetch:
            from Acquire.Service import ServiceAccountError
            if service_uid is not None:
                raise ServiceAccountError(
                    "We do not trust the service with UID '%s'" % service_uid)
            else:
                raise ServiceAccountError(
                    "We do not trust the service at URL '%s'" % service_url)

        # we can try to fetch this data - we will ask our own
        # registry
        from Acquire.Registry import get_trusted_registry_service \
            as _get_trusted_registry_service
        registry = _get_trusted_registry_service(service_uid=service.uid())
        service = registry.get_service(service_uid=service_uid,
                                       service_url=service_url)

        from Acquire.Service import trust_service as _trust_service
        _trust_service(service)
        return service
    else:
        # this is running on the client
        from Acquire.Client import Wallet as _Wallet
        wallet = _Wallet()
        service = wallet.get_service(service_uid=service_uid,
                                     service_url=service_url,
                                     service_type=service_type,
                                     autofetch=autofetch)
        return service
Example #26
def run(args):
    """This function will allow the current user to authorise
       a logout from the current session - this will be authorised
       by signing the request to logout"""

    status = 0
    message = None

    session_uid = args["session_uid"]
    username = args["username"]
    permission = args["permission"]
    signature = string_to_bytes(args["signature"])

    # generate a sanitised version of the username
    user_account = UserAccount(username)

    # now log into the central identity account to query
    # the current status of this login session
    bucket = login_to_service_account()

    user_session_key = "sessions/%s/%s" % \
        (user_account.sanitised_name(), session_uid)

    request_session_key = "requests/%s/%s" % (session_uid[:8], session_uid)

    login_session = LoginSession.from_data(
        ObjectStore.get_object_from_json(bucket, user_session_key))

    if login_session:
        # get the signing certificate from the login session and
        # validate that the permission object has been signed by
        # the user requesting the logout
        cert = login_session.public_certificate()

        cert.verify(signature, permission)

        # the signature was correct, so log the user out. For record
        # keeping purposes we change the login session to a logged-out state
        # and move it to another part of the object store
        if login_session.is_approved():
            login_session.logout()

    # only save sessions that were successfully approved
    if login_session:
        if login_session.is_logged_out():
            expired_session_key = "expired_sessions/%s/%s" % \
                                    (user_account.sanitised_name(),
                                     session_uid)

            ObjectStore.set_object_from_json(bucket, expired_session_key,
                                             login_session.to_data())

    try:
        ObjectStore.delete_object(bucket, user_session_key)
    except:
        pass

    try:
        ObjectStore.delete_object(bucket, request_session_key)
    except:
        pass

    status = 0
    message = "Successfully logged out"

    return_value = create_return_value(status, message)

    return return_value
Example #27
def test_objstore(bucket):
    keys = []

    message = "ƒƒƒ Hello World ∂∂∂"

    ObjectStore.set_string_object(bucket, "test", message)
    keys.append("test")

    assert(message == ObjectStore.get_string_object(bucket, "test"))

    message = "€€#¢∞ Hello ˚ƒ´πµçµΩ"

    ObjectStore.set_string_object(bucket, "test/something", message)
    keys.append("test/something")

    assert(message == ObjectStore.get_string_object(bucket, "test/something"))

    data = {"cat": "mieow",
            "dog": "woof",
            "sounds": [1, 2, 3, 4, 5],
            "flag": True}

    ObjectStore.set_object_from_json(bucket, "test/object", data)
    keys.append("test/object")

    assert(data == ObjectStore.get_object_from_json(bucket, "test/object"))

    names = ObjectStore.get_all_object_names(bucket)

    assert(len(names) == len(keys))

    names = ObjectStore.get_all_object_names(bucket, "test")

    assert(len(names) == 3)

    names = ObjectStore.get_all_object_names(bucket, "test/")

    assert(len(names) == 2)

    names = ObjectStore.get_all_object_names(bucket, "test/some")

    assert(len(names) == 1)

    for name in names:
        assert(name in keys)

    new_bucket = ObjectStore.create_bucket(bucket, "new_bucket")

    ObjectStore.set_object_from_json(new_bucket, "test/object2", data)
    assert(data == ObjectStore.get_object_from_json(new_bucket,
                                                    "test/object2"))

    with pytest.raises(ObjectStoreError):
        new_bucket = ObjectStore.create_bucket(bucket, "testing_objstore")

    with pytest.raises(ObjectStoreError):
        new_bucket = ObjectStore.create_bucket(bucket, "new_bucket")

    with pytest.raises(ObjectStoreError):
        new_bucket = ObjectStore.get_bucket(bucket, "get_bucket",
                                            create_if_needed=False)

    new_bucket = ObjectStore.get_bucket(bucket, "get_bucket",
                                        create_if_needed=True)

    test_key = "test_string"
    test_value = "test_string_value"

    ObjectStore.set_string_object(new_bucket, test_key, test_value)

    new_bucket2 = ObjectStore.get_bucket(bucket, "get_bucket",
                                         create_if_needed=False)

    test_value2 = ObjectStore.get_string_object(new_bucket2, test_key)

    assert(test_value == test_value2)
Example #28
    def _reconcile_daily_accounts(self, bucket=None):
        """Internal function used to reconcile the daily accounts.
           This ensures that every line item transaction is summed up
           so that the starting balance for each day is recorded into
           the object store
        """
        if self.is_null():
            return

        if bucket is None:
            bucket = _login_to_service_account()

        # work back from today to the first day of the account to calculate
        # all of the daily balances... We need to record every day of the
        # account to support quick lookups
        today = _datetime.datetime.now().toordinal()
        day = today
        last_data = None
        num_missing_days = 0

        while last_data is None:
            daytime = _datetime.datetime.fromordinal(day)
            key = self._get_balance_key(daytime)
            last_data = _ObjectStore.get_object_from_json(bucket, key)

            if last_data is None:
                day -= 1
                num_missing_days += 1

                if num_missing_days > 100:
                    # we need another strategy to find the last balance
                    break

        if last_data is None:
            # find the latest day by reading the keys in the object
            # store directly
            root = "%s/balance/" % self._key()
            keys = _ObjectStore.get_all_object_names(bucket, root)

            if keys is None or len(keys) == 0:
                raise AccountError("There is no daily balance recorded for "
                                   "the account with UID %s" % self.uid())

            # the encoding of the keys is such that, when sorted, the
            # last key must be the latest balance
            keys.sort()

            last_data = _ObjectStore.get_object_from_json(
                bucket, "%s%s" % (root, keys[-1]))
            day = _get_day_from_key(keys[-1]).toordinal()

            if last_data is None:
                raise AccountError("How can there be no data for key %s?" %
                                   keys[-1])

        # what was the balance on the last day?
        result = (_create_decimal(last_data["balance"]),
                  _create_decimal(last_data["liability"]),
                  _create_decimal(last_data["receivable"]))

        # ok, now we go from the last day until today and sum up the
        # line items from each day to create the daily balances
        # (not including today, as we only want the balance at the beginning
        #  of today)
        for d in range(day + 1, today + 1):
            day_time = _datetime.datetime.fromordinal(d)
            transaction_keys = self._get_transaction_keys_between(
                _datetime.datetime.fromordinal(d - 1), day_time)

            total = _sum_transactions(transaction_keys)

            result = (result[0] + total[0], result[1] + total[1],
                      result[2] + total[2])

            balance_key = self._get_balance_key(day_time)

            data = {}
            data["balance"] = str(result[0])
            data["liability"] = str(result[1])
            data["receivable"] = str(result[2])

            _ObjectStore.set_object_from_json(bucket, balance_key, data)
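
The reconciliation above walks backwards and forwards in whole days using ordinal day numbers. A small illustration of that arithmetic with the standard library (nothing Acquire-specific):

import datetime

today = datetime.datetime.now().toordinal()       # days since 0001-01-01
yesterday = datetime.datetime.fromordinal(today - 1)

# iterate day by day from an earlier ordinal up to (and including) today
for d in range(today - 3, today + 1):
    day_time = datetime.datetime.fromordinal(d)   # midnight at the start of that day
    print(d, day_time.date())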
Example #29
def run(args):
    """This function is called to handle request to cash cheques. This
       will verify that the cheque is valid and will then create
       the debit/credit note pair for the transation. It will return
       the CreditNote to the caller so they can see that the funds have
       been reserved, and can receipt the transaction once goods/services
       have been delivered.

       Args:
            args (dict): information for payment for service

        Returns:
            dict: contains status, status message and credit note if valid

    """

    credit_notes = []

    try:
        cheque = args["cheque"]
    except:
        raise ValueError("You must supply a cheque to be cashed!")

    try:
        cheque = Cheque.from_data(cheque)
    except Exception as e:
        from Acquire.Service import exception_to_string
        raise TypeError("Unable to interpret the cheque.\n\nCAUSE: %s" %
                        exception_to_string(e))

    try:
        spend = args["spend"]
    except:
        spend = None

    if spend is not None:
        try:
            spend = string_to_decimal(spend)
        except Exception as e:
            from Acquire.Service import exception_to_string
            raise TypeError("Unable to interpret the spend.\n\nCause: %s" %
                            exception_to_string(e))

    try:
        resource = str(args["resource"])
    except:
        raise ValueError(
            "You must supply a string representing the resource that will "
            "be paid for using this cheque")

    try:
        account_uid = str(args["account_uid"])
    except:
        raise ValueError("You must supply the UID of the account to which the "
                         "cheque will be cashed")

    try:
        receipt_by = args["receipt_by"]
    except:
        raise ValueError(
            "You must supply the datetime by which you promise to "
            "receipt this transaction")

    try:
        receipt_by = string_to_datetime(receipt_by)
    except Exception as e:
        from Acquire.Service import exception_to_string
        raise TypeError(
            "Unable to interpret the receipt_by date.\n\nCAUSE: %s" %
            exception_to_string(e))

    # now read the cheque - this will only succeed if the cheque
    # is valid, has been signed, has been sent from the right
    # service, and was authorised by the user, the cheque
    # has not expired and we are the
    # service which holds the account from which funds are drawn
    info = cheque.read(resource=resource, spend=spend, receipt_by=receipt_by)

    try:
        description = str(args["description"])
    except:
        description = info["resource"]

    authorisation = info["authorisation"]
    auth_resource = info["auth_resource"]
    user_guid = authorisation.user_guid()

    # the cheque is valid
    bucket = get_service_account_bucket()

    try:
        debit_account = Account(uid=info["account_uid"], bucket=bucket)
    except Exception as e:
        from Acquire.Service import exception_to_string
        raise PaymentError("Cannot find the account associated with the cheque"
                           "\n\nCAUSE: %s" % exception_to_string(e))

    try:
        credit_account = Account(uid=account_uid, bucket=bucket)
    except Exception as e:
        from Acquire.Service import exception_to_string
        raise PaymentError(
            "Cannot find the account to which funds will be creditted:"
            "\n\nCAUSE: %s" % exception_to_string(e))

    # validate that this account is in a group that can be authorised
    # by the user (this should eventually go as the ACLs now allow users
    # to authorise payments from many accounts)
    accounts = Accounts(user_guid=user_guid)
    if not accounts.contains(account=debit_account, bucket=bucket):
        raise PermissionError(
            "The user with UID '%s' cannot authorise transactions from "
            "the account '%s' as they do not own this account." %
            (user_guid, str(debit_account)))

    transaction = Transaction(value=info["spend"], description=description)

    # we have enough information to perform the transaction
    # - this is provisional as the service must receipt everything
    transaction_records = Ledger.perform(transactions=transaction,
                                         debit_account=debit_account,
                                         credit_account=credit_account,
                                         authorisation=authorisation,
                                         authorisation_resource=auth_resource,
                                         is_provisional=True,
                                         receipt_by=receipt_by,
                                         bucket=bucket)

    # extract all of the credit notes to return to the user,
    # and also to record so that we can check if they have not
    # been receipted in time...
    credit_notes = []

    for record in transaction_records:
        credit_notes.append(record.credit_note())

    credit_notes = list_to_string(credit_notes)

    receipt_key = "accounting/cashed_cheque/%s" % info["uid"]
    mutex = Mutex(receipt_key, bucket=bucket)

    try:
        receipted = ObjectStore.get_object_from_json(bucket, receipt_key)
    except:
        receipted = None

    if receipted is not None:
        # we have tried to cash this cheque twice!
        mutex.unlock()
        Ledger.refund(transaction_records, bucket=bucket)
    else:
        info = {"status": "needs_receipt", "creditnotes": credit_notes}
        ObjectStore.set_object_from_json(bucket, receipt_key, info)
        mutex.unlock()

    return {"credit_notes": credit_notes}
Example #30
    def get_service(self, service_uid=None, service_url=None):
        """Load and return the service with specified url or uid
           from the registry. This will consult with other
           registry services to find the matching service
        """
        from Acquire.ObjectStore import ObjectStore as _ObjectStore
        from Acquire.Service import Service as _Service
        from Acquire.ObjectStore import string_to_encoded \
            as _string_to_encoded
        from Acquire.Service import get_this_service as _get_this_service

        this_service = _get_this_service(need_private_access=False)

        if service_url is not None:
            from Acquire.Service import Service as _Service
            service_url = _Service.get_canonical_url(service_url)

        if this_service.uid() == service_uid:
            return this_service
        elif this_service.canonical_url() == service_url:
            return this_service

        bucket = self.get_bucket()

        service_key = self.get_service_key(service_uid=service_uid,
                                           service_url=service_url)

        service = None

        if service_key is not None:
            try:
                data = _ObjectStore.get_object_from_json(bucket=bucket,
                                                         key=service_key)
                service = _Service.from_data(data)
            except:
                pass

        if service is not None:
            must_write = False

            if service.uid() == "STAGE1":
                # we need to directly ask the service for its info
                service = self.challenge_service(service)

                if service.uid() == "STAGE1":
                    from Acquire.Service import MissingServiceError
                    raise MissingServiceError(
                        "Service %s|%s not available as it is still under "
                        "construction!" % (service_uid, service))

                # we can now move this service from pending to active
                uidkey = self._get_key_for_uid(service.uid())
                domain = self._get_domain(service.service_url())
                domainroot = self._get_root_key_for_domain(domain=domain)

                pending_key = "%s/pending/%s" % (domainroot, service.uid())
                active_key = "%s/active/%s" % (domainroot, service.uid())

                try:
                    _ObjectStore.delete_object(bucket=bucket,
                                               key=pending_key)
                except:
                    pass

                try:
                    _ObjectStore.set_string_object(bucket=bucket,
                                                   key=active_key,
                                                   string_data=uidkey)
                except:
                    pass

                must_write = True
            elif service.should_refresh_keys():
                service.refresh_keys()
                must_write = True

            if must_write:
                data = service.to_data()
                _ObjectStore.set_object_from_json(bucket=bucket,
                                                  key=service_key,
                                                  data=data)
            return service

        # we only get here if we can't find the service on this registry.
        # In the future, we will use the initial part of the UID of
        # the service to ask its registering registry for its data.
        # For now, we just raise an error
        from Acquire.Service import MissingServiceError
        raise MissingServiceError(
            "No service available: service_url=%s  service_uid=%s" %
            (service_url, service_uid))