def test_process_records_simplify_created_deleted_undeleted(db, create_event, dummy_agent):
    """Check that an undeletion surviving create+delete yields a creation."""
    db.session.add(dummy_agent)
    event = create_event()
    change_sequence = (ChangeType.created, ChangeType.data_changed, ChangeType.deleted, ChangeType.undeleted)
    queue = [LiveSyncQueueEntry(change=change, agent=dummy_agent, type=EntryType.event, event=event)
             for change in change_sequence]
    db.session.flush()
    # a restore always results in a creation event, even if the other changes cancelled each other
    assert process_records(queue) == {event: SimpleChange.created}
def test_process_records_simplify_created_deleted_child(db, create_event, create_contribution, dummy_agent):
    """Test if deleted items of newly created events are still cascaded.

    This is needed because when cloning an event, there's just a creation
    record for the event itself, but cascading creates creation records for
    all its child objects (e.g. contributions). Now, when such a contribution
    gets deleted WITHOUT a livesync run in between, we have the event creation
    record and a contribution deletion record. But when cascading the
    creation, usually deleted objects are skipped, so we'd try to delete
    something from the search service that doesn't exist there.
    """
    db.session.add(dummy_agent)
    event = create_event()
    live_contrib = create_contribution(event, 'Test 1')
    deleted_contrib = create_contribution(event, 'Test 2', is_deleted=True)
    queue = [
        LiveSyncQueueEntry(change=ChangeType.created, agent=dummy_agent, type=EntryType.event, event=event),
        LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent, type=EntryType.contribution,
                           contribution=deleted_contrib),
    ]
    db.session.flush()
    # the deleted contribution must not resurface; the live one is cascaded from the event creation
    assert process_records(queue) == {event: SimpleChange.created, live_contrib: SimpleChange.created}
def _apply_changes(sender, **kwargs):
    """Flush the livesync changes collected on ``g`` into the queue.

    Signal handler; does nothing when the current request recorded no
    changes. Objects whose reference is excluded (e.g. inside an excluded
    category) are skipped entirely.
    """
    if not hasattr(g, 'livesync_changes'):
        return
    # `dict.iteritems()` does not exist on Python 3; `.items()` is equivalent here
    for ref, changes in g.livesync_changes.items():
        if is_ref_excluded(ref):
            continue
        LiveSyncQueueEntry.create(changes, ref)
def test_clean_old_entries(dummy_event, db, dummy_agent):
    """Check that only processed entries older than the TTL get purged."""
    now = now_utc()
    # 10 processed + 10 unprocessed entries, half a day apart starting 0.5 days ago
    for day in range(10):
        for processed in (True, False):
            entry = LiveSyncQueueEntry(agent=dummy_agent, change=ChangeType.created, type=EntryType.event,
                                       event=dummy_event, processed=processed,
                                       timestamp=now - timedelta(days=day, hours=12))
            db.session.add(entry)
    db.session.flush()
    # Nothing deleted with the setting's default value
    clean_old_entries()
    assert LiveSyncQueueEntry.find().count() == 20
    # Nothing deleted when explicitly set to 0 (which is the default)
    LiveSyncPlugin.settings.set('queue_entry_ttl', 0)
    clean_old_entries()
    assert LiveSyncQueueEntry.find().count() == 20
    # Only the correct entries deleted, and no unprocessed ones
    LiveSyncPlugin.settings.set('queue_entry_ttl', 3)
    clean_old_entries()
    assert LiveSyncQueueEntry.find(processed=False).count() == 10
    assert LiveSyncQueueEntry.find(processed=True).count() == 3
def _apply_changes(sender, **kwargs):
    """Flush the livesync changes collected on ``g`` into the queue.

    Signal handler; does nothing when the current request recorded no
    changes. Category exclusion is delegated to ``LiveSyncQueueEntry.create``
    via ``excluded_categories``.
    """
    if not hasattr(g, 'livesync_changes'):
        return
    # only compute the exclusion list when something is actually queued
    excluded_categories = get_excluded_categories()
    # `dict.iteritems()` does not exist on Python 3; `.items()` is equivalent here
    for ref, changes in g.livesync_changes.items():
        LiveSyncQueueEntry.create(changes, ref, excluded_categories=excluded_categories)
def enqueue(type, change, ids):
    """Adds the given objects to the LiveSync queues.

    This is intended to be used if a change was not recorded by LiveSync for
    some reason and you want to manually force an update. Note that enqueuing
    a deletion for something that is not deleted may be dangerous and can
    cause agent runs to fail.

    By default this util uses the `protection_changed` change type since that
    way it cascades to all child objects when used on anything except
    categories.
    """
    # map each entry type to its SQLAlchemy model
    models = {
        EntryType.category: db.m.Category,
        EntryType.event: db.m.Event,
        EntryType.contribution: db.m.Contribution,
        EntryType.subcontribution: db.m.SubContribution,
        EntryType.session: db.m.Session,
        EntryType.note: db.m.EventNote,
        EntryType.attachment: db.m.Attachment,
    }
    model = models[type]
    excluded_categories = get_excluded_categories()
    for obj in model.query.filter(model.id.in_(ids)).all():
        click.echo(f'Enqueuing {obj}')
        LiveSyncQueueEntry.create({change}, obj_ref(obj), excluded_categories=excluded_categories)
    db.session.commit()
def test_process_records_simplify_deleted_undeleted(db, create_event, dummy_agent):
    """Check how a delete followed by an undelete is simplified."""
    db.session.add(dummy_agent)
    event = create_event()
    queue = [LiveSyncQueueEntry(change=change, agent=dummy_agent, type=EntryType.event, event=event)
             for change in (ChangeType.deleted, ChangeType.undeleted)]
    db.session.flush()
    result = process_records(queue)
    # this is not ideal (an empty dict would be better here, as being deleted first and THEN being restored),
    # could cancel each other, but there is no good way to do this without losing the more important
    # functionality from the test above unless we take the order of changes into account - and with the
    # cascading logic this is not really possible without cascading each queue entry separately, but doing
    # so would likely result in worse performance.
    # see the comment in `process_records` for details
    assert result == {event: SimpleChange.created}
def test_excluded_categories(mocker, monkeypatch, db, create_category):
    """Test if category exclusions work."""
    plugin = mocker.patch('indico_livesync.plugin.LiveSyncPlugin')
    plugin.settings.get.return_value = [{'id': 2}, {'id': 3}]
    categories = {}
    with db.session.no_autoflush:
        # build the category tree described by CATEGORY_PARENTS; id 0 is the root
        for cat_id in range(6):  # `xrange` does not exist on Python 3
            if cat_id:
                category = create_category(cat_id, title=str(cat_id), protection_mode=0,
                                           parent=categories[CATEGORY_PARENTS[cat_id]])
            else:
                category = Category.get_root()
            categories[cat_id] = category
            db.session.add(category)
            db.session.flush()
    db.session.flush()
    for cat in categories.values():  # `dict.viewvalues()` does not exist on Python 3
        db = mocker.patch('indico_livesync.models.queue.db')
        LiveSyncQueueEntry.create({ChangeType.created}, obj_ref(cat),
                                  excluded_categories=get_excluded_categories())
        # categories 2/3 are excluded and 4/5 live inside them, so nothing is queued for those
        assert db.session.add.called == (cat.id not in {2, 3, 4, 5})
def test_process_records_simplify(changes, mocker, db, create_event, dummy_agent):
    """Test if queue entries for the same object are properly simplified"""
    event1 = create_event(id_=1)
    event2 = create_event(id_=2)
    db.session.add(dummy_agent)
    db.session.add(event1)
    db.session.add(event2)
    refs = ({'type': EntryType.event, 'event_id': event1.id},
            {'type': EntryType.event, 'event_id': event2.id})
    queue = []
    # first three flags describe event1, last three describe event2
    changes = changes[:3], changes[3:]
    expected = [0, 0]
    for i, ref in enumerate(refs):
        if changes[i][0]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.created, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.created
        if changes[i][1]:
            # two identical updates to make sure duplicates collapse into one
            queue += [LiveSyncQueueEntry(change=ChangeType.data_changed, agent=dummy_agent, **ref),
                      LiveSyncQueueEntry(change=ChangeType.data_changed, agent=dummy_agent, **ref)]
            expected[i] |= SimpleChange.updated
        if changes[i][2]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.deleted
    db.session.flush()
    result = process_records(queue)
    assert result == process_records(reversed(queue))  # queue order shouldn't matter
    assert len(result) == sum(1 for x in expected if x)
    # `dict.viewitems()` does not exist on Python 3; `.items()` is equivalent here
    result_refs = {obj.id: change for obj, change in result.items()}
    for i, ref in enumerate(refs):
        assert (ref['event_id'] in result_refs) == bool(expected[i])
        assert result_refs.get(ref['event_id'], 0) == expected[i]
def clean_old_entries():
    """Deletes obsolete entries from the queues"""
    from indico_livesync.plugin import LiveSyncPlugin
    from indico_livesync.models.queue import LiveSyncQueueEntry

    ttl_days = LiveSyncPlugin.settings.get('queue_entry_ttl')
    # a TTL of 0 (the default) disables cleanup entirely
    if not ttl_days:
        return
    threshold = now_utc() - timedelta(days=ttl_days)
    # unprocessed entries are never removed - they still need to be synced
    query = LiveSyncQueueEntry.find(LiveSyncQueueEntry.processed,
                                    LiveSyncQueueEntry.timestamp < threshold)
    query.delete(synchronize_session='fetch')
def test_fetch_records(db, dummy_event, dummy_agent):
    """Test if the correct records are fetched"""
    backend = DummyBackend(dummy_agent)
    done = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event=dummy_event,
                              processed=True)
    pending = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event=dummy_event)
    dummy_agent.queue = [done, pending]
    db.session.flush()
    # only entries that have not been processed yet are returned
    assert backend.fetch_records() == [pending]
def test_run_failing(mocker, db, create_event, dummy_agent):
    """Test a failing queue run"""
    uploader = FailingUploader(MagicMock())
    uploader.BATCH_SIZE = 3
    events = tuple(create_event(id_=evt_id) for evt_id in range(10))  # `xrange` does not exist on Python 3
    records = tuple(LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                       agent=dummy_agent)
                    for evt in events)
    for rec in records:
        db.session.add(rec)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    uploader.run(records)
    objs = tuple((record.object, int(SimpleChange.created)) for record in records)
    assert uploader.logger.exception.called
    # No uploads should happen after a failed batch
    assert uploader._uploaded == [(set(objs[:3]), True), (set(objs[3:6]), True)]
    # Only successful records should be marked as processed
    assert all(record.processed for record in records[:3])
    assert not any(record.processed for record in records[3:])
    # Only the first successful batch should have triggered a commit
    assert db_mock.session.commit.call_count == 1
def test_run(mocker, db, create_event, dummy_agent):
    """Test uploading queued data"""
    uploader = RecordingUploader(MagicMock())
    uploader.BATCH_SIZE = 3
    events = tuple(create_event(id_=evt_id) for evt_id in range(4))  # `xrange` does not exist on Python 3
    records = tuple(LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                       agent=dummy_agent)
                    for evt in events)
    for rec in records:
        db.session.add(rec)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    uploader.run(records)
    objs = tuple((record.object, int(SimpleChange.created)) for record in records)
    # uploads happen in batches of BATCH_SIZE (3 + 1 here)
    batches = set(objs[:3]), set(objs[3:])
    assert uploader.all_uploaded == [(batches[0], True), (batches[1], True)]
    # All records should be marked as processed
    assert all(record.processed for record in records)
    # Marking records as processed is committed immediately
    assert db_mock.session.commit.call_count == 2
def test_process_records_cascade(mocker, change, cascade):
    """Test if certain changes cascade to child elements"""
    cascade_mock = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    entry = LiveSyncQueueEntry(change=change)
    process_records([entry])
    # cascading changes are forwarded to the helper; non-cascading ones produce an empty set
    expected_arg = {entry} if cascade else set()
    assert cascade_mock.call_args == ((expected_arg,),)
def test_run_failing(mocker, monkeypatch, db, create_event, dummy_agent):
    """Test a failing queue run"""
    uploader = FailingUploader(TestBackend())
    records = []
    for evt_id in range(10):
        evt = create_event(id_=evt_id)
        rec = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                 agent=dummy_agent)
        db.session.add(rec)
        records.append(rec)
    records = tuple(records)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    # make cascading deterministic so upload order is stable
    monkeypatch.setattr('indico_livesync.simplify._process_cascaded_event_contents',
                        _sorted_process_cascaded_event_contents)
    uploader.run(records)
    assert uploader.logger.exception.called
    # No uploads should happen after a failed batch
    assert not uploader._uploaded
    # No records should be marked as processed
    assert not any(rec.processed for rec in records)
    # And nothing should have been committed
    db_mock.session.commit.assert_not_called()
def test_run(mocker, monkeypatch, db, create_event, dummy_agent):
    """Test uploading queued data"""
    uploader = RecordingUploader(TestBackend())
    records = []
    for evt_id in range(4):
        evt = create_event(id_=evt_id)
        rec = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event_id=evt.id,
                                 agent=dummy_agent)
        db.session.add(rec)
        records.append(rec)
    records = tuple(records)
    db.session.flush()
    db_mock = mocker.patch('indico_livesync.uploader.db')
    # make cascading deterministic so upload order is stable
    monkeypatch.setattr('indico_livesync.simplify._process_cascaded_event_contents',
                        _sorted_process_cascaded_event_contents)
    uploader.run(records)
    expected = [(rec.object, int(SimpleChange.created)) for rec in records]
    assert uploader.all_uploaded == expected
    # All records should be marked as processed
    assert all(rec.processed for rec in records)
    # After the queue run the changes should be committed
    assert db_mock.session.commit.call_count == 1
def test_fetch_records_categories_disabled(db, dummy_event, dummy_category, dummy_agent, disabled, whitelisted):
    """Test if the correct records are fetched"""
    backend = DummyBackend(dummy_agent)
    cat_entry = LiveSyncQueueEntry(change=ChangeType.protection_changed, type=EntryType.category,
                                   category=dummy_category)
    event_entry = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event=dummy_event)
    queue = [cat_entry, event_entry]
    dummy_agent.queue = queue
    LiveSyncPlugin.settings.set('skip_category_changes', disabled)
    db.session.flush()
    # the category entry is dropped only when category changes are skipped and it is not whitelisted
    if disabled and not whitelisted:
        expected = [event_entry]
    else:
        expected = queue
    whitelist = (dummy_category.id,) if whitelisted else ()
    assert backend.fetch_records(whitelist) == expected
def test_clean_old_entries(dummy_event, db, dummy_agent):
    """Check that cleanup honours the TTL and never touches unprocessed entries."""
    now = now_utc()
    # 10 processed and 10 unprocessed entries, one every day starting 0.5 days in the past
    for processed in (True, False):
        for day in range(10):
            stamp = now - timedelta(days=day, hours=12)
            db.session.add(LiveSyncQueueEntry(agent=dummy_agent, change=ChangeType.created,
                                              type=EntryType.event, event=dummy_event,
                                              processed=processed, timestamp=stamp))
    db.session.flush()
    # Nothing deleted with the setting's default value
    clean_old_entries()
    assert LiveSyncQueueEntry.find().count() == 20
    # Nothing deleted when explicitly set to 0 (which is the default)
    LiveSyncPlugin.settings.set('queue_entry_ttl', 0)
    clean_old_entries()
    assert LiveSyncQueueEntry.find().count() == 20
    # Only the correct entries deleted, and no unprocessed ones
    LiveSyncPlugin.settings.set('queue_entry_ttl', 3)
    clean_old_entries()
    assert LiveSyncQueueEntry.find(processed=False).count() == 10
    assert LiveSyncQueueEntry.find(processed=True).count() == 3
def test_process_records_category_ignored(mocker, change, invalid):
    """Test if categories are only kept for certain changes"""
    cascade = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    cascade.return_value = [object()]
    records = [LiveSyncQueueEntry(change=change, type=EntryType.category)]
    if invalid:
        # invalid change types for categories trip the assertion inside process_records
        with pytest.raises(AssertionError):
            process_records(records)
    else:
        result = process_records(records)
        assert len(result) == 1
        # dict views are not indexable on Python 3, so materialize before subscripting
        assert list(result.values())[0] == SimpleChange.updated
def test_excluded_categories(mocker, monkeypatch, db, create_category):
    """Test if category exclusions work."""
    plugin = mocker.patch('indico_livesync.plugin.LiveSyncPlugin')
    plugin.settings.get.return_value = [{'id': 2}, {'id': 3}]
    categories = {}
    with db.session.no_autoflush:
        # build the category tree described by CATEGORY_PARENTS; id 0 is the root
        for cat_id in range(6):  # `xrange` does not exist on Python 3
            category = (create_category(cat_id, title=str(cat_id), protection_mode=0,
                                        parent=categories[CATEGORY_PARENTS[cat_id]])
                        if cat_id else Category.get_root())
            categories[cat_id] = category
            db.session.add(category)
            db.session.flush()
    db.session.flush()
    for cat in categories.values():  # `dict.viewvalues()` does not exist on Python 3
        db = mocker.patch('indico_livesync.models.queue.db')
        LiveSyncQueueEntry.create({ChangeType.created}, obj_ref(cat),
                                  excluded_categories=get_excluded_categories())
        # categories 2/3 are excluded and 4/5 live inside them, so nothing is queued for those
        assert db.session.add.called == (cat.id not in {2, 3, 4, 5})
def test_process_records_simplify_created_deleted(db, create_event, dummy_agent):
    """Check that create+update+delete of the same object simplifies to nothing."""
    db.session.add(dummy_agent)
    event = create_event()
    change_sequence = (ChangeType.created, ChangeType.data_changed, ChangeType.deleted)
    queue = [LiveSyncQueueEntry(change=change, agent=dummy_agent, type=EntryType.event, event=event)
             for change in change_sequence]
    db.session.flush()
    # creation + deletion should cancel each other out
    assert process_records(queue) == {}
def test_process_records_category_ignored(dummy_agent, dummy_category, dummy_event, change, invalid, simplified):
    """Test if categories are only kept for certain changes."""
    entry = LiveSyncQueueEntry(agent=dummy_agent, change=change, type=EntryType.category,
                               category=dummy_category)
    if invalid:
        # invalid change types for categories trip the assertion inside process_records
        with pytest.raises(AssertionError):
            process_records([entry])
    else:
        assert process_records([entry]) == {dummy_event: simplified}
def test_marcxml_run(mocker, db, dummy_event, dummy_agent):
    """Test if the MARCXML uploader uses the correct function"""
    mocker.patch('indico_livesync.uploader.db')
    mocker.patch.object(MARCXMLUploader, 'upload_xml', autospec=True)
    mxg = mocker.patch('indico_livesync.uploader.MARCXMLGenerator')
    entry = LiveSyncQueueEntry(change=ChangeType.created, type=EntryType.event, event=dummy_event,
                               agent=dummy_agent)
    db.session.add(entry)
    db.session.flush()
    uploader = MARCXMLUploader(MagicMock())
    # a normal queue run serializes queue entries...
    uploader.run([entry])
    assert mxg.records_to_xml.called
    assert not mxg.objects_to_xml.called
    assert uploader.upload_xml.called
    mxg.reset_mock()
    # ...while an initial export serializes plain objects
    uploader.run_initial([1])
    assert not mxg.records_to_xml.called
    assert mxg.objects_to_xml.called
    assert uploader.upload_xml.called
def _apply_changes(sender, **kwargs):
    """Flush the livesync changes collected on ``g`` into the queue.

    Signal handler; does nothing when the current request recorded no
    changes. Category exclusion is delegated to ``LiveSyncQueueEntry.create``
    via ``excluded_categories``.
    """
    if not hasattr(g, 'livesync_changes'):
        return
    excluded_categories = get_excluded_categories()
    # `dict.iteritems()` does not exist on Python 3; `.items()` is equivalent here
    for ref, changes in g.livesync_changes.items():
        LiveSyncQueueEntry.create(changes, ref, excluded_categories=excluded_categories)