Пример #1
0
 def get_applications(self, user, collection, since, token):
     """Return (last_modified, app) pairs modified later than 'since'."""
     session = self._resume_session(token)
     since = round_time(since)
     results = []
     # Consult the collection metadata first; if it's missing, deleted,
     # or not modified after 'since', we can bail out before doing any
     # per-item reads.
     try:
         cached = self._get_cached_metadata(session, user, collection)
     except KeyError:
         return results
     meta = json.loads(cached.value)
     if meta.get("deleted", False):
         raise CollectionDeletedError(meta.get("client_id", ""),
                                      meta.get("reason", ""))
     if round_time(meta.get("last_modified", 0)) < since:
         return results
     # The "apps" index is kept sorted newest-first, so we can stop at
     # the first entry that is not newer than 'since'.
     for modified, appid in meta.get("apps", []):
         modified = round_time(modified)
         if modified <= since:
             break
         item_key = "%s::item::%s" % (collection, appid)
         try:
             data = session.get(item_key)
         except KeyError:
             # The app has been deleted out from under us; skip it.
             continue
         results.append((modified, json.loads(data)))
     return results
Пример #2
0
    def get_applications(self, user, collection, since, token):
        """Return (last_modified, app) pairs modified after 'since'.

        Raises CollectionDeletedError if the collection was deleted.
        """
        self._check_token(token)

        # is this a deleted collection ?
        res = self._execute(queries.IS_DEL, user=user, collection=collection)
        deleted = res.fetchone()
        if deleted is not None:
            raise CollectionDeletedError(deleted.client_id, deleted.reason)

        # get the last modified
        res = self._execute(queries.LAST_MODIFIED, user=user,
                            collection=collection)
        res = res.fetchone()
        if res in (None, (None,)):
            last_modified = None
        else:
            last_modified = res.last_modified

        # timestamps are stored using hundredths of seconds
        since = int(round_time(since) * 100)

        # An empty collection has no last_modified timestamp, so there is
        # nothing to return; likewise when nothing changed after 'since'.
        # The explicit None check avoids relying on Python 2's
        # "None compares less than everything" behaviour, which would
        # raise TypeError on Python 3.
        if last_modified is None or last_modified < since:
            return []

        apps = self._execute(queries.GET_QUERY, user=user,
                             collection=collection, since=since)

        # XXX dumb: serialize/unserialize round trip for nothing
        return [(round_time(app.last_modified / 100.),
                 json.loads(app.data)) for app in apps]
Пример #3
0
 def get_last_modified(self, user, collection, token):
     """Return the collection's last-modified time, or None if unset."""
     self._check_token(token)
     row = self._execute(queries.LAST_MODIFIED, user=user,
                         collection=collection).fetchone()
     if row in (None, (None,)):
         return None
     # the stored value is a timestamp * 100 (hundredths of seconds)
     return round_time(row.last_modified / 100.)
Пример #4
0
 def get_last_modified(self, user, collection, token):
     """Get the latest last-modified time for any app in the collection."""
     session = self._resume_session(token)
     # Only the meta document is needed to answer this query.
     try:
         raw_meta = session.get(collection + "::meta")
     except KeyError:
         return 0
     meta = json.loads(raw_meta)
     if meta.get("deleted", False):
         raise CollectionDeletedError(meta.get("client_id", ""),
                                      meta.get("reason", ""))
     return round_time(meta.get("last_modified", 0))
Пример #5
0
    def test_round_time(self):

        # with no argument, rounds the current time to two decimal places
        value = round_time()
        self.assertEqual(len(str(value).split('.')[-1]), 2)

        # accepts a numeric timestamp
        self.assertEqual(str(round_time(129084.198271987)), '129084.20')

        # accepts a string timestamp as well
        self.assertEqual(str(round_time('129084.198271987')), '129084.20')

        # non-numeric values raise ValueError
        self.assertRaises(ValueError, round_time, 'bleh')
        self.assertRaises(ValueError, round_time, object())

        # the precision can be changed
        self.assertEqual(str(round_time(129084.198271987, precision=3)),
                         '129084.198')
Пример #6
0
    def test_round_time(self):

        # default call: current time rounded to two decimal places
        rounded = round_time()
        self.assertEqual(len(str(rounded).split('.')[-1]), 2)

        # numeric timestamps are accepted
        rounded = round_time(129084.198271987)
        self.assertEqual('129084.20', str(rounded))

        # string timestamps are accepted too
        rounded = round_time('129084.198271987')
        self.assertEqual('129084.20', str(rounded))

        # anything non-numeric raises ValueError
        self.assertRaises(ValueError, round_time, 'bleh')
        self.assertRaises(ValueError, round_time, object())

        # precision is configurable
        rounded = round_time(129084.198271987, precision=3)
        self.assertEqual('129084.198', str(rounded))
Пример #7
0
    def get_applications(self, user, collection, since, token):
        """Return all apps in the collection modified after 'since'.

        Raises CollectionDeletedError if the collection was deleted.
        """
        # Every other storage entry point validates the token first
        # (see add_applications below); this method was missing the check.
        self._check_token(token)

        res = self._execute(queries.IS_DEL, user=user, collection=collection)
        deleted = res.fetchone()
        if deleted is not None:
            raise CollectionDeletedError(deleted.client_id, deleted.reason)

        # timestamps are stored using hundredths of seconds
        since = int(round_time(since) * 100)
        apps = self._execute(queries.GET_QUERY, user=user,
                             collection=collection, since=since)

        # XXX dumb: serialize/unserialize round trip for nothing
        return [json.loads(app.data) for app in apps]
    def add_applications(self, user, collection, applications, token):
        """Insert or update the given applications in the collection.

        Clears any deletion tombstone, ensures the collection has a uuid,
        then upserts each application keyed by its 'origin'.
        """
        self._check_token(token)
        # Clear a deletion tombstone if the collection was previously deleted.
        res = self._execute(queries.IS_DEL, user=user, collection=collection)
        deleted = res.fetchone()
        res.close()
        if deleted is not None:
            self._execute(queries.REMOVE_DEL, user=user, collection=collection)

        # timestamps are stored as hundredths of seconds
        now = int(round_time() * 100)

        # let's see if we have an uuid
        res = self._execute(queries.GET_UUID, user=user, collection=collection)
        res = res.fetchone()
        if res is None:
            # we need to create one
            uuid = '%s-%s' % (now, collection)
            self._execute(queries.ADD_UUID,
                          user=user,
                          collection=collection,
                          uuid=uuid)
        else:
            uuid = res.uuid

        # the *real* storage will do bulk inserts of course
        for app in applications:
            origin = app['origin']
            # Upsert: insert when no row exists for this origin,
            # otherwise update the existing row in place.
            res = self._execute(queries.GET_BY_ORIGIN_QUERY,
                                user=user,
                                collection=collection,
                                origin=origin)
            res = res.fetchone()
            if res is None:
                self._execute(queries.PUT_QUERY,
                              user=user,
                              collection=collection,
                              last_modified=now,
                              data=json.dumps(app),
                              origin=app['origin'])
            else:
                ## FIXME: for debugging
                if res.data == json.dumps(app):
                    ## This is a logic error on the client:
                    logger.error(('Bad attempt to update an application '
                                  ' to overwrite itself: %r') % app['origin'])

                self._execute(queries.UPDATE_BY_ORIGIN_QUERY,
                              user=user,
                              collection=collection,
                              id=res.id,
                              data=json.dumps(app),
                              last_modified=now)
    def get_last_modified(self, user, collection, token):
        """Get the latest last-modified time for any app in the collection."""
        session = self._resume_session(token)

        # The meta document alone carries the collection's timestamp.
        try:
            cached = self._get_cached_metadata(session, user, collection)
        except KeyError:
            return 0
        meta = json.loads(cached.value)
        if meta.get("deleted", False):
            raise CollectionDeletedError(meta.get("client_id", ""),
                                         meta.get("reason", ""))
        return round_time(meta.get("last_modified", 0))
Пример #10
0
    def delete(self, user, collection, client_id, reason, token):
        """Delete a collection by tombstoning its metadata.

        The meta document is rewritten first to mark the collection as
        deleted, then the individual app items are removed best-effort;
        conflicts with concurrent uploads are deliberately ignored.
        """
        s = self._resume_session(token)
        # Grab the collection metadata as it is before deleting anything.
        # We can bail out early if it's already deleted.
        meta_key = collection + "::meta"
        try:
            meta = s.getitem(meta_key)
        except KeyError:
            # No metadata yet: treat as an empty collection.
            meta_etag = ""
            meta_data = {}
        else:
            meta_etag = meta.etag
            meta_data = json.loads(meta.value)
        if meta_data.get("deleted", False):
            return
        etags = meta_data.get("etags", {})
        # Update the metadata to mark it as deleted.
        # We do this first to minimize the impact of conflicts with
        # concurrent updates, by not deleting apps that some clients
        # might think are still in place.
        meta_data["deleted"] = True
        meta_data["client_id"] = client_id
        meta_data["reason"] = reason
        meta_data["apps"] = []
        meta_data["etags"] = {}
        meta_data["uuid"] = None
        meta_data["last_modified"] = round_time()
        # if_match guards against racing with a concurrent metadata write.
        s.set(meta_key, json.dumps(meta_data), if_match=meta_etag)

        # Now we can delete the applications that were recorded in
        # the metadata.
        # If we're doing this concurrently with an upload, we might get
        # some edit conflicts here.  There's not much we can do except
        # bail out - the uploader will get an edit conflict when they go to
        # save the metadata document, and hopefully they'll clean up the mess.
        for appid, etag in etags.iteritems():
            key = "%s::item::%s" % (collection, appid)
            try:
                s.delete(key, if_match=etag)
            except KeyError:
                # Someone else has already deleted it, no biggie.
                pass
            except pysauropod.ConflictError:
                # Someone has uploaded a new version; they can deal with it.
                pass
    def delete(self, user, collection, client_id, reason, token):
        """Delete a collection by tombstoning its cached metadata.

        The metadata is rewritten first to mark the collection as
        deleted, then the individual app items are removed best-effort;
        conflicts with concurrent uploads are deliberately ignored.
        """
        s = self._resume_session(token)
        # Grab the collection metadata as it is before deleting anything.
        # We can bail out early if it's already deleted.
        try:
            meta = self._get_cached_metadata(s, user, collection)
        except KeyError:
            # No metadata yet: treat as an empty collection.
            meta_etag = ""
            meta_data = {}
        else:
            meta_etag = meta.etag
            meta_data = json.loads(meta.value)
        if meta_data.get("deleted", False):
            return
        etags = meta_data.get("etags", {})
        # Update the metadata to mark it as deleted.
        # We do this first to minimize the impact of conflicts with
        # concurrent updates, by not deleting apps that some clients
        # might think are still in place.
        meta_data["deleted"] = True
        meta_data["client_id"] = client_id
        meta_data["reason"] = reason
        meta_data["apps"] = []
        meta_data["etags"] = {}
        meta_data["uuid"] = None
        meta_data["last_modified"] = round_time()
        # meta_etag guards against racing with a concurrent metadata write.
        self._set_cached_metadata(s, user, collection, meta_data, meta_etag)

        # Now we can delete the applications that were recorded in
        # the metadata.
        # If we're doing this concurrently with an upload, we might get
        # some edit conflicts here.  There's not much we can do except
        # bail out - the uploader will get an edit conflict when they go to
        # save the metadata document, and hopefully they'll clean up the mess.
        for appid, etag in etags.iteritems():
            key = "%s::item::%s" % (collection, appid)
            try:
                s.delete(key, if_match=etag)
            except KeyError:
                # Someone else has already deleted it, no biggie.
                pass
            except pysauropod.ConflictError:
                # Someone has uploaded a new version; they can deal with it.
                pass
Пример #12
0
    def add_applications(self, user, collection, applications, token):
        """Insert or update the given applications in the collection.

        Clears any deletion tombstone, ensures the collection has a uuid,
        then upserts each application keyed by its 'origin'.
        """
        self._check_token(token)
        # Clear a deletion tombstone if the collection was previously deleted.
        res = self._execute(queries.IS_DEL, user=user, collection=collection)
        deleted = res.fetchone()
        res.close()
        if deleted is not None:
            self._execute(queries.REMOVE_DEL, user=user, collection=collection)

        # timestamps are stored as hundredths of seconds
        now = int(round_time() * 100)

        # let's see if we have an uuid
        res = self._execute(queries.GET_UUID, user=user,
                            collection=collection)
        res = res.fetchone()
        if res is None:
            # we need to create one
            uuid = '%s-%s' % (now, collection)
            self._execute(queries.ADD_UUID, user=user,
                          collection=collection, uuid=uuid)
        else:
            uuid = res.uuid

        # the *real* storage will do bulk inserts of course
        for app in applications:
            origin = app['origin']
            # Upsert: insert when no row exists for this origin,
            # otherwise update the existing row in place.
            res = self._execute(queries.GET_BY_ORIGIN_QUERY, user=user,
                                collection=collection, origin=origin)
            res = res.fetchone()
            if res is None:
                self._execute(queries.PUT_QUERY, user=user,
                              collection=collection,
                              last_modified=now, data=json.dumps(app),
                              origin=app['origin'])
            else:
                ## FIXME: for debugging
                if res.data == json.dumps(app):
                    ## This is a logic error on the client:
                    logger.error(('Bad attempt to update an application '
                                  ' to overwrite itself: %r') % app['origin'])

                self._execute(queries.UPDATE_BY_ORIGIN_QUERY, user=user,
                              collection=collection,
                              id=res.id, data=json.dumps(app),
                              last_modified=now)
Пример #13
0
    def add_applications(self, user, collection, applications, token):
        """Add application updates to a collection.

        Returns the new last-modified timestamp.  Raises
        EditConflictError when a concurrent writer has changed one of
        the apps (all non-conflicting writes are still applied).
        """
        s = self._resume_session(token)
        # Load the current metadata state so we can update it when finished.
        # We need it first so we can detect conflicts from concurrent uploads.
        try:
            meta = self._get_cached_metadata(s, user, collection)
        except KeyError:
            meta_etag = ""
            meta_data = {}
        else:
            meta_etag = meta.etag
            meta_data = json.loads(meta.value)
        apps = meta_data.get("apps", [])
        etags = meta_data.get("etags", {})
        # Generate a new last_modified timestamp, and make sure it's
        # actually larger than any existing timestamp. Yay clock skew!
        now = round_time()
        last_modified = round_time(meta_data.get("last_modified", 0))
        if now <= last_modified:
            now = last_modified + 1
        # Store the data for each application.
        # We use the stored etags to verify that the application hasn't
        # already been updated.  If it has been, we get the updated etag
        # so that we can repair the metadata document.
        has_conflict = False
        for app in applications:
            appid = app["origin"]
            etag = etags.get(appid, "")
            key = "%s::item::%s" % (collection, appid)
            value = json.dumps(app)
            try:
                item = s.set(key, value, if_match=etag)
            except pysauropod.ConflictError:
                # Someone else has changed that key.
                # If we're lucky, it was us in a previous failed write attempt.
                # Otherwise, we're going to need to report a conflict.
                try:
                    item = s.getitem(key)
                except KeyError:
                    has_conflict = True
                    etags[appid] = ""
                else:
                    etags[appid] = item.etag
                    if item.value != value:
                        has_conflict = True
            else:
                etags[appid] = item.etag
            # Update the app's modification time in the index list.
            # We'll re-sort the list once at the end.
            for i, item in enumerate(apps):
                if item[1] == appid:
                    apps[i] = [now, appid]
                    break
            else:
                apps.append([now, appid])
        # Update the metadata document.
        # Hopefully no-one else has written it in the meantime.
        # If we get a conflict, we leave all of our modifications in place.
        # The client will just try again later and happily find that all
        # of the keys have already been updated.
        # The index is kept sorted newest-first so readers can stop early.
        apps.sort(reverse=True)
        meta_data["apps"] = apps
        meta_data["etags"] = etags
        if meta_data.pop("deleted", False):
            meta_data.pop("client_id", None)
            meta_data.pop("reason", None)
        meta_data["last_modified"] = now
        if not meta_data.get("uuid"):
            meta_data["uuid"] = uuid.uuid4().hex

        self._set_cached_metadata(s, user, collection, meta_data, meta_etag)

        # Finally, we have completed the writes.
        # Report back if we found some apps that had been changed and
        # could not be overwritten.
        if has_conflict:
            raise EditConflictError()
        return now
    def add_applications(self, user, collection, applications, token):
        """Add application updates to a collection.

        Returns the new last-modified timestamp.  Raises
        EditConflictError when a concurrent writer has changed one of
        the apps (all non-conflicting writes are still applied).
        """
        s = self._resume_session(token)
        # Load the current metadata state so we can update it when finished.
        # We need it first so we can detect conflicts from concurrent uploads.
        try:
            meta = self._get_cached_metadata(s, user, collection)
        except KeyError:
            meta_etag = ""
            meta_data = {}
        else:
            meta_etag = meta.etag
            meta_data = json.loads(meta.value)
        apps = meta_data.get("apps", [])
        etags = meta_data.get("etags", {})
        # Generate a new last_modified timestamp, and make sure it's
        # actually larger than any existing timestamp. Yay clock skew!
        now = round_time()
        last_modified = round_time(meta_data.get("last_modified", 0))
        if now <= last_modified:
            now = last_modified + 1
        # Store the data for each application.
        # We use the stored etags to verify that the application hasn't
        # already been updated.  If it has been, we get the updated etag
        # so that we can repair the metadata document.
        has_conflict = False
        for app in applications:
            appid = app["origin"]
            etag = etags.get(appid, "")
            key = "%s::item::%s" % (collection, appid)
            value = json.dumps(app)
            try:
                item = s.set(key, value, if_match=etag)
            except pysauropod.ConflictError:
                # Someone else has changed that key.
                # If we're lucky, it was us in a previous failed write attempt.
                # Otherwise, we're going to need to report a conflict.
                try:
                    item = s.getitem(key)
                except KeyError:
                    has_conflict = True
                    etags[appid] = ""
                else:
                    etags[appid] = item.etag
                    if item.value != value:
                        has_conflict = True
            else:
                etags[appid] = item.etag
            # Update the app's modification time in the index list.
            # We'll re-sort the list once at the end.
            for i, item in enumerate(apps):
                if item[1] == appid:
                    apps[i] = [now, appid]
                    break
            else:
                apps.append([now, appid])
        # Update the metadata document.
        # Hopefully no-one else has written it in the meantime.
        # If we get a conflict, we leave all of our modifications in place.
        # The client will just try again later and happily find that all
        # of the keys have already been updated.
        # The index is kept sorted newest-first so readers can stop early.
        apps.sort(reverse=True)
        meta_data["apps"] = apps
        meta_data["etags"] = etags
        if meta_data.pop("deleted", False):
            meta_data.pop("client_id", None)
            meta_data.pop("reason", None)
        meta_data["last_modified"] = now
        if not meta_data.get("uuid"):
            meta_data["uuid"] = uuid.uuid4().hex

        self._set_cached_metadata(s, user, collection, meta_data, meta_etag)

        # Finally, we have completed the writes.
        # Report back if we found some apps that had been changed and
        # could not be overwritten.
        if has_conflict:
            raise EditConflictError()
        return now
Пример #15
0
def get_data(request):
    """After authenticating with the server and getting back the URL of the
    collection, request::

        GET /collections/{user}/{collection}?since=timestamp

    `since` is optional; on first sync is should be empty or left off. The
    server will return an object::

        {until: timestamp,
         uuid: someuniquevalue,  # using a timestamp XXXX
         incomplete: bool, applications: {origin: {...},
                                                   ...} }

    The response may not be complete if there are too many applications.
    If this is the case then `incomplete` will be true (it may be left out
    if
    the response is complete).  Another request using `since={until}` will
    get further applications (this may need to be repeated many times).

    The client should save the value of `until` and use it for subsequent
    requests.

    In the case of no new items the response will be only::

        {until: timestamp}

    The client should always start with this GET request and only then send
    its own updates.  It should ensure that its local timestamp is
    sensible in comparison to the value of `until`.

    Applications returned may be older than the local applications, in that
    case then the client should ignore the server's application and use
    its local copy, causing an overwrite.  The same is true for deleted
    applications; if the local installed copy has a `last_modified` date
    newer than the deleted server instance then the server instance
    should be ignored (the user reinstalled an application).

    **NOTE:** there are some conflicts that may occur, specifically
    receipts should be merged.

    When an application is added from the server the client should
    *locally* set `app.sync` to true (this is in the [application
    representation]
    (https://developer.mozilla.org/en/OpenWebApps/The_JavaScript_API
    #Application_Representation), not the manifest).

    You must always retain `last_modified` as you received it from
    the server (unless you ignore the application in favor of a
    newer local version).
    """
    user, collection, dbtoken = check_auth(request)

    try:
        since = request.GET.get('since', '0')
        since = round_time(since)
    except TypeError:
        raise bad_request(INVALID_SINCE_VALUE)
    except ValueError:
        print 'Bad since', repr(since)
        raise bad_request(INVALID_SINCE_VALUE,
                          'Invalid value for since: %r' % since)

    if since.is_nan():
        raise bad_request(INVALID_SINCE_VALUE,
                          'Got NaN value for since')

    storage = get_storage(request)

    res = {'since': since,
           'until': round_time(),
           'uuid': storage.get_uuid(user, collection, dbtoken)}

    try:
        res['applications'] = storage.get_applications(user, collection,
                                                       since, token=dbtoken)
    except CollectionDeletedError, e:
        return {'collection_deleted': {'reason': e.reason,
                                       'client_id': e.client_id}}
Пример #16
0
def post_data(request):
    """The client should keep track of the last time it sent updates to the
    server, and send updates when there are newer applications.

    **NOTE:** there is a case when an update might be lost because of an
    update from another device; this would be okay except that the client
    doesn't know it needs to re-send that update.  How do we confirm that ?

    The updates are sent with::

        POST /collections/{user}/{collection}?lastget=somedate

        {origin: {...}, ...}

    Each object must have a `last_modified` key.

    The response is only::

        {received: timestamp}

    XXX
    if lastget (timestamp) was provided and the collection has been changed
    since that date, we send back a 412 Precondition Failed.

    """
    user, collection, dbtoken = check_auth(request)
    server_time = round_time()
    storage = get_storage(request)

    if 'delete' in request.params:
        # collection deletion was requested
        try:
            info = request.json_body
        except ValueError:
            raise bad_request(INVALID_JSON)

        if 'client_id' not in info:
            raise bad_request(MISSING_VALUE)

        storage.delete(user, collection, info['client_id'],
                       info.get('reason', ''), token=dbtoken)
        return {'received': server_time}

    # the delete branch returns above, so a plain 'if' suffices here
    if 'lastget' in request.params:
        last_get = round_time(float(request.params['lastget']))
        last_modified = storage.get_last_modified(user, collection,
                                                  token=dbtoken)
        if last_modified > last_get:
            raise exc.HTTPPreconditionFailed()

    try:
        apps = request.json_body
    except ValueError:
        raise bad_request(INVALID_JSON)

    # failures here are logged and surface to the user as a 503
    # with an empty body
    storage.add_applications(user, collection, apps, token=dbtoken)

    return {'received': server_time}
def get_data(request):
    """After authenticating with the server and getting back the URL of the
    collection, request::

        GET /collections/{user}/{collection}?since=timestamp

    `since` is optional; on first sync is should be empty or left off. The
    server will return an object::

        {until: timestamp,
         uuid: someuniquevalue,  # using a timestamp XXXX
         incomplete: bool, applications: {origin: {...},
                                                   ...} }

    The response may not be complete if there are too many applications.
    If this is the case then `incomplete` will be true (it may be left out
    if
    the response is complete).  Another request using `since={until}` will
    get further applications (this may need to be repeated many times).

    The client should save the value of `until` and use it for subsequent
    requests.

    In the case of no new items the response will be only::

        {until: timestamp}

    The client should always start with this GET request and only then send
    its own updates.  It should ensure that its local timestamp is
    sensible in comparison to the value of `until`.

    Applications returned may be older than the local applications, in that
    case then the client should ignore the server's application and use
    its local copy, causing an overwrite.  The same is true for deleted
    applications; if the local installed copy has a `last_modified` date
    newer than the deleted server instance then the server instance
    should be ignored (the user reinstalled an application).

    **NOTE:** there are some conflicts that may occur, specifically
    receipts should be merged.

    When an application is added from the server the client should
    *locally* set `app.sync` to true (this is in the [application
    representation]
    (https://developer.mozilla.org/en/OpenWebApps/The_JavaScript_API
    #Application_Representation), not the manifest).

    You must always retain `last_modified` as you received it from
    the server (unless you ignore the application in favor of a
    newer local version).
    """
    user, collection, dbtoken = check_auth(request)

    try:
        since = request.GET.get('since', '0')
        since = round_time(since)
    except TypeError:
        raise bad_request(INVALID_SINCE_VALUE)
    except ValueError:
        logger.error('Bad since %r' % since)
        raise bad_request(INVALID_SINCE_VALUE,
                          'Invalid value for since: %r' % since)

    if since.is_nan():
        raise bad_request(INVALID_SINCE_VALUE, 'Got NaN value for since')

    storage = get_storage(request)

    res = {'since': since, 'uuid': storage.get_uuid(user, collection, dbtoken)}
    until = 0
    apps = []
    try:
        for index, (last_modified, app) in enumerate(
                storage.get_applications(user,
                                         collection,
                                         since,
                                         token=dbtoken)):
            if last_modified > until:
                until = last_modified

            apps.append(app)

        res['applications'] = apps
        if not until:
            until = since
        res['until'] = until

    except CollectionDeletedError, e:
        return {
            'collection_deleted': {
                'reason': e.reason,
                'client_id': e.client_id
            }
        }
def post_data(request):
    """The client should keep track of the last time it sent updates to the
    server, and send updates when there are newer applications.

    **NOTE:** there is a case when an update might be lost because of an
    update from another device; this would be okay except that the client
    doesn't know it needs to re-send that update.  How do we confirm that ?

    The updates are sent with::

        POST /collections/{user}/{collection}?lastget=somedate

        {origin: {...}, ...}

    Each object must have a `last_modified` key.

    The response is only::

        {received: timestamp}

    XXX
    if lastget (timestamp) was provided and the collection has been changed
    since that date, we send back a 412 Precondition Failed.

    """
    user, collection, dbtoken = check_auth(request)
    server_time = round_time()
    storage = get_storage(request)

    if 'delete' in request.params:
        # we were asked to delete the collection
        try:
            info = request.json_body
        except ValueError:
            raise bad_request(INVALID_JSON)

        if 'client_id' not in info:
            raise bad_request(MISSING_VALUE)

        client_id = info['client_id']
        reason = info.get('reason', '')
        storage.delete(user, collection, client_id, reason, token=dbtoken)
        return {'received': server_time}

    elif 'lastget' in request.params:
        # reject the write if the collection changed after the client's
        # last GET, so the client re-syncs before posting
        last_get = round_time(float(request.params['lastget']))
        last_modified = storage.get_last_modified(user,
                                                  collection,
                                                  token=dbtoken)
        if last_modified > last_get:
            raise exc.HTTPPreconditionFailed()

    try:
        apps = request.json_body
    except ValueError:
        raise bad_request(INVALID_JSON)

    # in case this fails, the error will get logged
    # and the user will get a 503 (empty body)

    storage.add_applications(user, collection, apps, token=dbtoken)

    return {'received': server_time}