    def syncPloneObjects(self, fti, records, sf_object_id=None, ignore_no_container=None):
        """
        Given the results from Salesforce, update or create the appropriate
        Plone objects.
        """
        
        time_start = time.time()
        schema = fti.lookupSchema()
        
        catalog = getToolByName(self.context, 'portal_catalog')
        query = {
            'object_provides': schema.__identifier__,
        }
        
        # Map existing Plone objects by Salesforce Id so incoming records can
        # be matched against them; anything left over afterwards was not
        # returned by Salesforce.
        sfid_map = dict((b.sf_object_id, b)
                        for b in catalog.searchResults(query)
                        if b.sf_object_id)
        
        objects_updated_count = 0
        for i, record in enumerate(records):
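            # Hash the raw record so objects whose Salesforce data has not
            # changed since the last sync can be skipped below.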
            digest = sha1(str(record)).digest()
            if record.Id in sfid_map:
                sfobj = ISalesforceObject(sfid_map[record.Id].getObject())
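                # Remove matched entries; whatever remains in sfid_map was not
                # returned by this Salesforce query.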
                del sfid_map[record.Id]
            
                # skip updating items that haven't changed, based on the digest
                if digest == sfobj.sf_data_digest:
                    continue
            
                sfobj.updatePloneObject(record)
            else:
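                # No existing Plone object for this Salesforce Id, so create a
                # new one from the type's factory and add it to a container.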
                obj = createObject(fti.factory)
                notify(ObjectCreatedEvent(obj))
                sfobj = ISalesforceObject(obj)
                sfobj.updatePloneObject(record)
                try:
                    sfobj.addToContainer()
                except ValueError:
                    # No suitable container was found for the new object;
                    # ignore the error if the caller requested it.
                    if not ignore_no_container:
                        raise
            
            objects_updated_count += 1
            sfobj.sf_data_digest = digest
            
            # Trigger ObjectModifiedEvent to reindex the object.
            # We mark it so that handlers can avoid taking action when
            # objects are updated in this way (such as a handler that
            # writes back to Salesforce).
            event = ObjectModifiedEvent(sfobj.context)
            alsoProvides(event, IModifiedViaSalesforceSync)
            notify(event)
            
            # Send an UpdatedFromSalesforce event.
            notify(UpdatedFromSalesforceEvent(sfobj.context))
                                    
            # Commit periodically.
            if not objects_updated_count % 10:
                try:
                    transaction.commit()
                    logger.debug('Committed updates (%s)' % i)
                except ConflictError:
                    # If there was a conflict, subsequent commits will fail,
                    # so explicitly start a new transaction.
                    logger.exception('Conflict on updates (%s)' % i)
                    transaction.begin()
        
        # Send NotFoundInSalesforce events for objects that weren't
        # returned by the Salesforce query.
        # We skip this if an sf_object_id was supplied, because that means
        # we intentionally didn't find all of the objects.
        if sf_object_id is None:
            for i, (sf_id, brain) in enumerate(sfid_map.items()):
                notify(NotFoundInSalesforceEvent(brain.getObject()))
            
                # Commit periodically.
                if not i % 10:
                    transaction.commit()

        time_elapsed = time.time() - time_start
        logger.debug('Sync completed in %s seconds. Have a nice day.' % time_elapsed)