Example #1
    def testBatchSize(self):
        for sync_type in chromiumsync.ALL_TYPES[1:]:
            specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
            self.model = chromiumsync.SyncDataModel()
            request_types = [sync_type]

            for i in range(self.model._BATCH_SIZE * 3):
                entry = sync_pb2.SyncEntity()
                entry.id_string = 'batch test %d' % i
                entry.specifics.CopyFrom(specifics)
                self.model._SaveEntry(entry)
            last_bit = self.ExpectedPermanentItemCount(sync_type)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, 0))
            self.assertEqual(self.model._BATCH_SIZE, version)
            self.assertEqual(self.model._BATCH_SIZE * 2 + last_bit,
                             changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(self.model._BATCH_SIZE * 2, version)
            self.assertEqual(self.model._BATCH_SIZE + last_bit,
                             changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(self.model._BATCH_SIZE * 3, version)
            self.assertEqual(last_bit, changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(self.model._BATCH_SIZE * 3 + last_bit, version)
            self.assertEqual(0, changes_remaining)

            # Now delete a third of the items.
            for i in xrange(self.model._BATCH_SIZE * 3 - 1, 0, -3):
                entry = sync_pb2.SyncEntity()
                entry.id_string = 'batch test %d' % i
                entry.deleted = True
                self.model._SaveEntry(entry)

            # The batch counts shouldn't change.
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, 0))
            self.assertEqual(self.model._BATCH_SIZE, len(changes))
            self.assertEqual(self.model._BATCH_SIZE * 2 + last_bit,
                             changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(self.model._BATCH_SIZE, len(changes))
            self.assertEqual(self.model._BATCH_SIZE + last_bit,
                             changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(self.model._BATCH_SIZE, len(changes))
            self.assertEqual(last_bit, changes_remaining)
            version, changes, changes_remaining = (
                self.GetChangesFromTimestamp(request_types, version))
            self.assertEqual(last_bit, len(changes))
            self.assertEqual(self.model._BATCH_SIZE * 4 + last_bit, version)
            self.assertEqual(0, changes_remaining)
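
Before any deletions, the four calls above page through _BATCH_SIZE * 3 saved items plus last_bit permanent items: each call returns at most _BATCH_SIZE changes, version advances by the size of the returned batch, and changes_remaining shrinks by the same amount until it reaches zero. Below is a standalone sketch of that paging arithmetic, using hypothetical plain-Python stand-ins rather than the chromiumsync API; it deliberately ignores the second half of the test, where every tombstone write is a fresh save and therefore pushes the final version to _BATCH_SIZE * 4 + last_bit.

# Standalone sketch of the paging arithmetic the first four calls verify.
# Hypothetical names throughout; this is not the chromiumsync implementation.
BATCH_SIZE = 100  # stand-in for SyncDataModel._BATCH_SIZE
LAST_BIT = 5      # stand-in for ExpectedPermanentItemCount(sync_type)

def get_changes_from(timestamp, total_items, batch_size=BATCH_SIZE):
    """Return (new_version, batch_len, changes_remaining) for one page."""
    unseen = total_items - timestamp
    batch_len = min(batch_size, unseen)
    return timestamp + batch_len, batch_len, unseen - batch_len

version, total = 0, BATCH_SIZE * 3 + LAST_BIT
pages = []
while True:
    version, batch_len, remaining = get_changes_from(version, total)
    pages.append((version, batch_len, remaining))
    if remaining == 0:
        break
# pages == [(100, 100, 205), (200, 100, 105), (300, 100, 5), (305, 5, 0)]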
Example #2
 def DoCommit(original=None,
              id_string='',
              name=None,
              parent=None,
              position=0):
     proto = sync_pb2.SyncEntity()
     if original is not None:
         proto.version = original.version
         proto.id_string = original.id_string
         proto.parent_id_string = original.parent_id_string
         proto.name = original.name
     else:
         proto.id_string = id_string
         proto.version = 0
     proto.specifics.CopyFrom(specifics)
     if name is not None:
         proto.name = name
     if parent:
         proto.parent_id_string = parent.id_string
     proto.insert_after_item_id = 'please discard'
     proto.position_in_parent = position
     proto.folder = True
     proto.deleted = False
     result = self.model.CommitEntry(proto, my_cache_guid,
                                     commit_session)
     self.assertTrue(result)
     return (proto, result)
 def AssertPositionResult(my_id, parent_id, prev_id, expected_position):
     entry = sync_pb2.SyncEntity()
     entry.id_string = my_id
     self.model._WritePosition(entry, parent_id, prev_id)
     self.assertEqual(expected_position, entry.position_in_parent)
     self.assertEqual(parent_id, entry.parent_id_string)
     self.assertFalse(entry.HasField('insert_after_item_id'))
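
DoCommit and AssertPositionResult above are nested helpers taken from inside a test method, so they close over names bound in the enclosing scope (self, specifics, my_cache_guid, commit_session). A hypothetical driver showing how such helpers would typically be exercised follows; the identifiers and the pre-existing parent_folder entry are illustrative assumptions, not the actual test body.

# Hypothetical usage, assuming the enclosing test has already set up
# `specifics`, `my_cache_guid`, `commit_session`, `self.model`, and a
# `parent_folder` entry that already exists in the model.
folder_proto, folder_result = DoCommit(id_string='proto_folder',
                                       name='Outer folder',
                                       parent=parent_folder)
nested_proto, nested_result = DoCommit(id_string='proto_nested',
                                       name='Nested folder',
                                       parent=folder_result,
                                       position=1)
# The committed entries carry server-assigned IDs and server-chosen absolute
# positions; the 'please discard' insert_after_item_id is cleared on commit.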
Example #4
 def testSaveEntry(self):
     proto = sync_pb2.SyncEntity()
     proto.id_string = 'abcd'
     proto.version = 0
     self.assertFalse(self.model._ItemExists(proto.id_string))
     self.model._SaveEntry(proto)
     self.assertEqual(1, proto.version)
     self.assertTrue(self.model._ItemExists(proto.id_string))
     self.model._SaveEntry(proto)
     self.assertEqual(2, proto.version)
     proto.version = 0
     self.assertTrue(self.model._ItemExists(proto.id_string))
     self.assertEqual(2, self.model._entries[proto.id_string].version)
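
The assertions above pin down the contract of _SaveEntry: every save assigns the next value of a model-wide version counter to the entry, and the model stores its own copy, so resetting the caller's proto.version back to 0 afterwards does not change what _entries holds. Below is a minimal sketch consistent with those assertions; the real SyncDataModel._SaveEntry does more (sync timestamps, originator fields), so treat the details as assumptions.

import copy

class _SketchModel(object):
    """Hypothetical stand-in illustrating only the versioning contract."""

    def __init__(self):
        self._entries = {}   # id_string -> stored SyncEntity copy
        self._version = 0    # model-wide change counter

    def _ItemExists(self, id_string):
        return id_string in self._entries

    def _SaveEntry(self, entry):
        # Every save gets the next model-wide version, so the second save of
        # 'abcd' in testSaveEntry observes version 2.
        self._version += 1
        entry.version = self._version
        # Store a deep copy so the caller's proto and the stored entry can
        # diverge afterwards, as the final assertions require.
        self._entries[entry.id_string] = copy.deepcopy(entry)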
Example #5
            def MakeTombstone(id_string):
                """Make a tombstone entry that will replace the entry being deleted.

                Args:
                  id_string: Index of the SyncEntity to be deleted.
                Returns:
                  A new SyncEntity reflecting the fact that the entry is deleted.
                """
                # Only the ID, version and deletion state are preserved on a tombstone.
                # TODO(nick): Does the production server not preserve the type?  Not
                # doing so means that tombstones cannot be filtered based on
                # requested_types at GetUpdates time.
                tombstone = sync_pb2.SyncEntity()
                tombstone.id_string = id_string
                tombstone.deleted = True
                tombstone.name = ''
                return tombstone
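
Example #8 below contains the same deletion logic written inline inside CommitEntry; with this helper factored out, that deletion branch would presumably reduce to:

if entry.deleted:
    # Replace the full entry with its tombstone before saving.
    entry = MakeTombstone(entry.id_string)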
Example #6
    def _CreatePermanentItem(self, spec):
        """Create one permanent item from its spec, if it doesn't exist.

        The resulting item is added to the changelog.

        Args:
          spec: A PermanentItem object holding the properties of the item
            to create.
        """
        id_string = self._ServerTagToId(spec.tag)
        if self._ItemExists(id_string):
            return
        print 'Creating permanent item: %s' % spec.name
        entry = sync_pb2.SyncEntity()
        entry.id_string = id_string
        entry.non_unique_name = spec.name
        entry.name = spec.name
        entry.server_defined_unique_tag = spec.tag
        entry.folder = True
        entry.deleted = False
        entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
        self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
        self._SaveEntry(entry)
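
The spec argument is only read through a few attributes here (tag, name, parent_tag, sync_type). A hypothetical spec shape sufficient for this method is sketched below as a namedtuple; the real PermanentItem class in chromiumsync may carry more fields, and the tag and type values are illustrative assumptions.

import collections

# Hypothetical stand-in for the PermanentItem spec consumed above.
PermanentItemSpec = collections.namedtuple(
    'PermanentItemSpec', ['tag', 'name', 'parent_tag', 'sync_type'])

example_spec = PermanentItemSpec(
    tag='google_chrome_bookmarks',    # illustrative server-defined tag
    name='Bookmarks',
    parent_tag='google_chrome',       # illustrative parent tag
    sync_type=chromiumsync.BOOKMARK)  # assumes a BOOKMARK type constant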
    def testAcknowledgeManagedUser(self):
        # Create permanent items.
        self.GetChangesFromTimestamp([chromiumsync.MANAGED_USER], 0)
        proto = sync_pb2.SyncEntity()
        proto.id_string = 'abcd'
        proto.version = 0

        # Make sure the managed_user field exists.
        proto.specifics.managed_user.acknowledged = False
        self.assertTrue(proto.specifics.HasField('managed_user'))
        self.AddToModel(proto)
        version1, changes1, remaining1 = (self.GetChangesFromTimestamp(
            [chromiumsync.MANAGED_USER], 0))
        for change in changes1:
            self.assertTrue(not change.specifics.managed_user.acknowledged)

        # Turn on managed user acknowledgement
        self.model.acknowledge_managed_users = True

        version2, changes2, remaining2 = (self.GetChangesFromTimestamp(
            [chromiumsync.MANAGED_USER], 0))
        for change in changes2:
            self.assertTrue(change.specifics.managed_user.acknowledged)
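
The two GetChangesFromTimestamp calls above return the same stored entry first unacknowledged and then acknowledged, which implies the acknowledgement is applied when changes are served rather than when they are stored. A sketch of that presumed server-side step is below; where exactly this logic lives in SyncDataModel is an assumption.

# Hypothetical post-processing applied to each outgoing change, assuming the
# model exposes the acknowledge_managed_users flag toggled in the test above.
def _MaybeAcknowledgeManagedUser(model, change):
    if (model.acknowledge_managed_users and
        change.specifics.HasField('managed_user')):
        change.specifics.managed_user.acknowledged = True
    return change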
Example #8
  def CommitEntry(self, entry, cache_guid, commit_session):
    """Attempt to commit one entry to the user's account.

    Args:
      entry: A SyncEntity protobuf representing desired object changes.
      cache_guid: A string value uniquely identifying the client; this
        is used for ID generation and will determine the originator_cache_guid
        if the entry is new.
      commit_session: A dictionary mapping client IDs to server IDs for any
        objects committed earlier this session.  If the entry gets a new ID
        during commit, the change will be recorded here.
    Returns:
      A SyncEntity reflecting the post-commit value of the entry, or None
      if the entry was not committed due to an error.
    """
    entry = DeepCopyOfProto(entry)

    # Generate server IDs for this entry, and write generated server IDs
    # from earlier entries into the message's fields, as appropriate.  The
    # ID generation state is stored in 'commit_session'.
    self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)

    # Perform the optimistic concurrency check on the entry's version number.
    # Clients are not allowed to commit unless they indicate that they've seen
    # the most recent version of an object.
    if not self._CheckVersionForCommit(entry):
      return None

    # Check the validity of the parent ID; it must exist at this point.
    # TODO(nick): Implement cycle detection and resolution.
    if not self._CheckParentIdForCommit(entry):
      return None

    # At this point, the commit is definitely going to happen.

    # Deletion works by storing a limited record for an entry, called a
    # tombstone.  A sync server must track deleted IDs forever, since it does
    # not keep track of client knowledge (there's no deletion ACK event).
    if entry.deleted:
      # Only the ID, version and deletion state are preserved on a tombstone.
      # TODO(nick): Does the production server not preserve the type?  Not
      # doing so means that tombstones cannot be filtered based on
      # requested_types at GetUpdates time.
      tombstone = sync_pb2.SyncEntity()
      tombstone.id_string = entry.id_string
      tombstone.deleted = True
      tombstone.name = ''  # 'name' is a required field; we're stuck with it.
      entry = tombstone
    else:
      # Comments in sync.proto detail how the representation of positional
      # ordering works: the 'insert_after_item_id' field specifies a
      # predecessor during Commit operations, but the 'position_in_parent'
      # field provides an absolute ordering in GetUpdates contexts.  Here
      # we convert from the former to the latter.  Specifically, we'll
      # generate a numeric position placing the item just after the object
      # identified by 'insert_after_item_id', and then clear the
      # 'insert_after_item_id' field so that it's not sent back to the client
      # during later GetUpdates requests.
      if entry.HasField('insert_after_item_id'):
        self._WritePosition(entry, entry.parent_id_string,
                            entry.insert_after_item_id)
      else:
        self._WritePosition(entry, entry.parent_id_string)

    # Preserve the originator info, which the client is not required to send
    # when updating.
    base_entry = self._entries.get(entry.id_string)
    if base_entry and not entry.HasField("originator_cache_guid"):
      entry.originator_cache_guid = base_entry.originator_cache_guid
      entry.originator_client_item_id = base_entry.originator_client_item_id

    # Commit the change.  This also updates the version number.
    self._SaveEntry(entry)
    # TODO(nick): Handle recursive deletion.
    return entry
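
The commit_session dictionary in the signature above is what threads state across the entries of a single commit message: server IDs minted while committing one entry (for example a new folder) can be resolved when later entries in the same batch refer to it as their parent. A hypothetical per-message driver illustrating that flow is sketched below; the handler name and message field access are assumptions, not the actual test server code.

# Hypothetical commit loop for one client commit message.
def HandleCommit(model, commit_message, cache_guid):
    commit_session = {}  # client ID -> server ID, shared across the batch
    results = []
    for client_entry in commit_message.entries:
        committed = model.CommitEntry(client_entry, cache_guid, commit_session)
        results.append(committed)  # None marks a per-entry failure
    return results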
 def MakeProto(id_string, parent, position):
     proto = sync_pb2.SyncEntity()
     proto.id_string = id_string
     proto.position_in_parent = position
     proto.parent_id_string = parent
     self.AddToModel(proto)