def test_process_records_cascade(mocker, change, cascade):
    """Check that the relevant change types cascade to child elements."""
    mock = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    entry = LiveSyncQueueEntry(change=change)
    process_records([entry])
    # the cascade helper receives the entry only for cascading change types
    expected_arg = {entry} if cascade else set()
    assert mock.call_args == ((expected_arg,),)
def test_process_records_simplify(changes, mocker, db, create_event, dummy_agent):
    """Check that queue entries for the same object are properly simplified."""
    event1 = create_event(id_=1)
    event2 = create_event(id_=2)
    db.session.add(dummy_agent)
    db.session.add(event1)
    db.session.add(event2)
    refs = ({'type': EntryType.event, 'event_id': event1.id},
            {'type': EntryType.event, 'event_id': event2.id})
    # first three flags apply to event1, the remaining ones to event2
    per_event_flags = (changes[:3], changes[3:])
    queue = []
    expected = [0, 0]
    for i, (ref, (created, updated, deleted)) in enumerate(zip(refs, per_event_flags)):
        if created:
            queue.append(LiveSyncQueueEntry(change=ChangeType.created, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.created
        if updated:
            # two identical updates to ensure duplicates collapse into one
            for _ in range(2):
                queue.append(LiveSyncQueueEntry(change=ChangeType.data_changed, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.updated
        if deleted:
            queue.append(LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.deleted
    db.session.flush()
    result = process_records(queue)
    # queue order shouldn't matter
    assert result == process_records(reversed(queue))
    assert len(result) == sum(1 for flags in expected if flags)
    result_refs = {obj.id: change for obj, change in result.items()}
    for i, ref in enumerate(refs):
        event_id = ref['event_id']
        assert (event_id in result_refs) == bool(expected[i])
        assert result_refs.get(event_id, 0) == expected[i]
def test_process_records_category_ignored(mocker, change, invalid):
    """Test if categories are only kept for certain changes"""
    # NOTE(review): a later test in this module reuses this function name with a
    # different fixture set — if both live in the same file, this one is shadowed
    # and never collected by pytest; confirm and rename/remove one of them.
    cascade = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    # a single cascaded child object is enough to observe the resulting change
    cascade.return_value = [object()]
    records = [LiveSyncQueueEntry(change=change, type=EntryType.category)]
    if invalid:
        # category changes that make no sense must fail the internal assertion
        with pytest.raises(AssertionError):
            process_records(records)
    else:
        result = process_records(records)
        assert len(result) == 1
        # cascaded category contents always surface as a plain update
        assert list(result.values())[0] == SimpleChange.updated
def test_process_records_category_ignored(dummy_agent, dummy_category, dummy_event, change, invalid, simplified):
    """Test if categories are only kept for certain changes."""
    entry = LiveSyncQueueEntry(agent=dummy_agent, change=change, type=EntryType.category,
                               category=dummy_category)
    if invalid:
        # invalid category changes must trip the internal assertion
        with pytest.raises(AssertionError):
            process_records([entry])
    else:
        # valid ones surface the category's event with the simplified change
        assert process_records([entry]) == {dummy_event: simplified}
def run(self, records):
    """Runs the batch upload

    :param records: an iterable containing queue entries
    """
    self_name = type(self).__name__
    # collapse the raw queue entries into one net change per object,
    # then let the concrete uploader turn them into uploadable data
    simplified = self.query_data(process_records(records)).items()
    total = len(simplified)
    if self.from_cli:
        # interactive runs get progress feedback while iterating
        simplified = self._make_verbose(simplified, total)
    try:
        self.logger.info(f'{self_name} uploading %d changes from %d records', total, len(records))
        if not self.upload_records(simplified):
            # a falsy return value is a soft failure: bail out WITHOUT
            # marking the records as processed so they are retried later
            self.logger.warning('uploader indicated a failure')
            return
    except Exception:
        self.logger.exception(f'{self_name} failed')
        if self.from_cli:
            # surface the error to the CLI user instead of swallowing it
            raise
        return
    # only after a fully successful upload are the records marked as done
    self.processed_records(records)
    self.logger.info(f'{self_name} finished (%d total changes from %d records)', total, len(records))
def test_process_records_simplify_deleted_undeleted(db, create_event, dummy_agent):
    db.session.add(dummy_agent)
    event = create_event()
    entries = [
        LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent,
                           type=EntryType.event, event=event),
        LiveSyncQueueEntry(change=ChangeType.undeleted, agent=dummy_agent,
                           type=EntryType.event, event=event),
    ]
    db.session.flush()
    # Not ideal: since the object was deleted first and THEN restored, an empty
    # dict would be the better outcome (the two changes could cancel each other).
    # But there is no good way to achieve that without losing the more important
    # behavior covered by the test above, unless change order were taken into
    # account - and with the cascading logic that is not really possible without
    # cascading each queue entry separately, which would likely hurt performance.
    # See the comment in `process_records` for details.
    assert process_records(entries) == {event: SimpleChange.created}
def test_process_records_simplify_created_deleted_undeleted(db, create_event, dummy_agent):
    db.session.add(dummy_agent)
    event = create_event()
    sequence = (ChangeType.created, ChangeType.data_changed, ChangeType.deleted, ChangeType.undeleted)
    queue = [LiveSyncQueueEntry(change=change, agent=dummy_agent, type=EntryType.event, event=event)
             for change in sequence]
    db.session.flush()
    # a restore always results in a creation event, even if the other changes cancelled each other
    assert process_records(queue) == {event: SimpleChange.created}
def test_process_records_simplify_created_deleted_child(db, create_event, create_contribution, dummy_agent):
    """Test if deleted items of newly created events are still cascaded.

    This is needed because when cloning an event, there's just a creation
    record for the event itself, but cascading creates creation records for
    all its child objects (e.g. contributions). Now, when such a contribution
    gets deleted WITHOUT a livesync run in between, we have the event creation
    record and a contribution deletion record. But when cascading the
    creation, usually deleted objects are skipped, so we'd try to delete
    something from the search service that doesn't exist there.
    """
    db.session.add(dummy_agent)
    event = create_event()
    kept_contrib = create_contribution(event, 'Test 1')
    removed_contrib = create_contribution(event, 'Test 2', is_deleted=True)
    queue = [
        LiveSyncQueueEntry(change=ChangeType.created, agent=dummy_agent,
                           type=EntryType.event, event=event),
        LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent,
                           type=EntryType.contribution, contribution=removed_contrib),
    ]
    db.session.flush()
    # only the event and its surviving child cascade as creations
    assert process_records(queue) == {event: SimpleChange.created, kept_contrib: SimpleChange.created}
def run(self, records):
    """Runs the batch upload

    :param records: an iterable containing queue entries
    """
    self_name = type(self).__name__
    # consume the queue in fixed-size batches so a failure only loses one batch
    for i, batch in enumerate(grouper(records, self.BATCH_SIZE, skip_missing=True), 1):
        self.logger.info('%s processing batch %d', self_name, i)
        try:
            # simplifying a batch may still yield many changes, so the
            # simplified result is chunked again before uploading
            for j, proc_batch in enumerate(grouper(process_records(batch).items(),
                                                   self.BATCH_SIZE, skip_missing=True), 1):
                self.logger.info('%s uploading chunk #%d (batch %d)', self_name, j, i)
                self.upload_records({k: v for k, v in proc_batch}, from_queue=True)
        except Exception:
            # abort the whole run; unprocessed batches remain queued for retry
            self.logger.exception('%s could not upload batch', self_name)
            return
        self.logger.info('%s finished batch %d', self_name, i)
        # mark the batch as processed only after all its chunks uploaded cleanly
        self.processed_records(batch)
    self.logger.info('%s finished', self_name)
def test_process_records_simplify_created_deleted(db, create_event, dummy_agent):
    db.session.add(dummy_agent)
    event = create_event()
    queue = [LiveSyncQueueEntry(change=change, agent=dummy_agent, type=EntryType.event, event=event)
             for change in (ChangeType.created, ChangeType.data_changed, ChangeType.deleted)]
    db.session.flush()
    # creation + deletion should cancel each other out
    assert process_records(queue) == {}