def test_process_records_simplify(changes):
    """Test if queue entries for the same object are properly simplified"""
    refs = (LiveSyncQueueEntry(type=EntryType.event, event_id=1).object_ref,
            LiveSyncQueueEntry(type=EntryType.event, event_id=2).object_ref)
    queue = []
    changes = changes[:3], changes[3:]
    expected = [0, 0]
    for i, ref in enumerate(refs):
        if changes[i][0]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.created, **ref))
            expected[i] |= SimpleChange.created
        if changes[i][1]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.data_changed, **ref))
            queue.append(LiveSyncQueueEntry(change=ChangeType.data_changed, **ref))
            expected[i] |= SimpleChange.updated
        if changes[i][2]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.deleted, **ref))
            expected[i] |= SimpleChange.deleted
    result = process_records(queue)
    assert result == process_records(reversed(queue))  # queue order shouldn't matter
    assert len(result) == sum(1 for x in expected if x)
    for i, ref in enumerate(refs):
        assert (ref in result) == bool(expected[i])
        assert result[ref] == expected[i]
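The `changes` argument consumed above is a flat sequence of six booleans, three per event, which the test splits via `changes[:3], changes[3:]`. A minimal sketch of a fixture that could drive it is shown below; the fixture-based parametrization and the exhaustive boolean product are assumptions for illustration, not necessarily how the actual test module provides it.

# Illustrative fixture sketch (assumption): feed the test every combination of
# (created, updated, deleted) flags for the two events.
import itertools

import pytest


@pytest.fixture(params=list(itertools.product((True, False), repeat=6)))
def changes(request):
    # First three flags describe event 1, the last three event 2.
    return request.param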
def test_process_records_category_ignored(mocker, change, invalid):
    """Test if categories are only kept for certain changes"""
    cascade = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    cascade.return_value = [object()]
    records = [LiveSyncQueueEntry(change=change, type=EntryType.category)]
    if invalid:
        with pytest.raises(AssertionError):
            process_records(records)
    else:
        result = process_records(records)
        assert len(result) == 1
        assert result.values()[0] == SimpleChange.updated
def test_process_records_simplify(changes, mocker, db, create_event, dummy_agent):
    """Test if queue entries for the same object are properly simplified"""
    event1 = create_event(id_=1)
    event2 = create_event(id_=2)
    db.session.add(dummy_agent)
    db.session.add(event1)
    db.session.add(event2)
    refs = ({'type': EntryType.event, 'event_id': event1.id},
            {'type': EntryType.event, 'event_id': event2.id})
    queue = []
    changes = changes[:3], changes[3:]
    expected = [0, 0]
    for i, ref in enumerate(refs):
        if changes[i][0]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.created, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.created
        if changes[i][1]:
            queue += [LiveSyncQueueEntry(change=ChangeType.data_changed, agent=dummy_agent, **ref),
                      LiveSyncQueueEntry(change=ChangeType.data_changed, agent=dummy_agent, **ref)]
            expected[i] |= SimpleChange.updated
        if changes[i][2]:
            queue.append(LiveSyncQueueEntry(change=ChangeType.deleted, agent=dummy_agent, **ref))
            expected[i] |= SimpleChange.deleted
    db.session.flush()
    result = process_records(queue)
    assert result == process_records(reversed(queue))  # queue order shouldn't matter
    assert len(result) == sum(1 for x in expected if x)
    result_refs = {obj.id: change for obj, change in result.viewitems()}
    for i, ref in enumerate(refs):
        assert (ref['event_id'] in result_refs) == bool(expected[i])
        assert result_refs.get(ref['event_id'], 0) == expected[i]
def run(self, records):
    """Runs the batch upload

    :param records: an iterable containing queue entries
    """
    self_name = type(self).__name__
    for i, batch in enumerate(grouper(records, self.BATCH_SIZE, skip_missing=True), 1):
        self.logger.info('%s processing batch %d', self_name, i)
        try:
            for j, proc_batch in enumerate(grouper(process_records(batch).iteritems(), self.BATCH_SIZE,
                                                   skip_missing=True), 1):
                self.logger.info('%s uploading chunk #%d (batch %d)', self_name, j, i)
                self.upload_records({k: v for k, v in proc_batch}, from_queue=True)
        except Exception:
            self.logger.exception('%s could not upload batch', self_name)
            return
        self.logger.info('%s finished batch %d', self_name, i)
        self.processed_records(batch)
    self.logger.info('%s finished', self_name)
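The batching above relies on `grouper(iterable, n, skip_missing=True)` yielding fixed-size chunks, with a short final chunk passed through unpadded. The helper comes from Indico's utilities; the stand-alone version below is only a sketch of the behaviour this loop depends on, not the real implementation.

# Stand-alone sketch of the chunking behaviour used above (assumption: the real
# indico helper may differ; only the semantics relied on here are reproduced).
from itertools import islice


def grouper(iterable, n, fillvalue=None, skip_missing=False):
    """Yield chunks of up to *n* items; pad the last chunk with *fillvalue*
    unless *skip_missing* is set, in which case it is yielded as-is."""
    it = iter(iterable)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        if not skip_missing and len(chunk) < n:
            chunk += [fillvalue] * (n - len(chunk))
        yield tuple(chunk)

With `skip_missing=True`, as in `run`, the final batch simply contains fewer than `BATCH_SIZE` entries.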
def run(self):
    records = self.fetch_records()
    if not records:
        self._print(cformat('%{yellow!}No records%{reset}'))
        return
    self._print(cformat('%{white!}Raw changes:%{reset}'))
    for record in records:
        self._print(record)
    self._print()
    self._print(cformat('%{white!}Simplified/cascaded changes:%{reset}'))
    for obj, change in process_records(records).items():
        self._print(cformat('%{white!}{}%{reset}: {}').format(_change_str(change), obj))
    self._print()
    self._print(cformat('%{white!}Resulting MarcXML:%{reset}'))
    uploader = DebugUploader(self)
    uploader.run(records)
    self.update_last_run()
def run(self):
    records = self.fetch_records()
    if not records:
        self._print(cformat('%{yellow!}No records%{reset}'))
        return
    self._print(cformat('%{white!}Raw changes:%{reset}'))
    for record in records:
        self._print(record)
    self._print()
    self._print(cformat('%{white!}Simplified/cascaded changes:%{reset}'))
    for obj, change in process_records(records).iteritems():
        self._print(cformat('%{white!}{}%{reset}: {}').format(_change_str(change), obj))
    self._print()
    self._print(cformat('%{white!}Resulting MarcXML:%{reset}'))
    uploader = DebugUploader(self)
    uploader.run(records)
    self.update_last_run()
def process_queue(self, uploader, allowed_categories=()):
    records = self.fetch_records(allowed_categories)
    if not records:
        print(cformat('%{yellow!}No records%{reset}'))
        return
    print(cformat('%{white!}Raw changes:%{reset}'))
    for record in records:
        print(record)
    print()
    print(cformat('%{white!}Simplified/cascaded changes:%{reset}'))
    for obj, change in process_records(records).items():
        print(cformat('%{white!}{}%{reset}: {}').format(_change_str(change), obj))
    print()
    print(cformat('%{white!}Resulting records:%{reset}'))
    uploader._is_queue_run = True
    uploader.run(records)
    self.update_last_run()
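`_change_str` only appears here to render the combined change bitmask for display. A minimal sketch of such a helper is given below, assuming `SimpleChange` exposes the `created`, `updated` and `deleted` flags seen in the tests above; the output format is an assumption for illustration.

# Minimal sketch of a bitmask-to-string helper (assumption: not necessarily
# what the plugin does; it simply lists every flag set in the bitmask).
def _change_str(change):
    parts = [name for name, flag in (('created', SimpleChange.created),
                                     ('updated', SimpleChange.updated),
                                     ('deleted', SimpleChange.deleted))
             if change & flag]
    return ', '.join(parts) or 'none'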
def test_process_records_cascade(mocker, change, cascade):
    """Test if certain changes cascade to child elements"""
    cascade_mock = mocker.patch('indico_livesync.simplify._process_cascaded_category_contents')
    records = [LiveSyncQueueEntry(change=change)]
    process_records(records)
    assert cascade_mock.call_args == (({records[0]} if cascade else set(),),)
def test_process_records_cascade(mocker, change, cascade):
    """Test if certain changes cascade to child elements"""
    cascade_mock = mocker.patch('indico_livesync.simplify._cascade')
    records = [LiveSyncQueueEntry(change=change)]
    process_records(records)
    assert cascade_mock.called == cascade
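Conceptually, what these tests exercise is the collapsing of many queue entries for one object into a single bitmask of simple changes. The snippet below is a self-contained sketch of that idea under assumed flag values; it is not the plugin's `process_records` implementation, which additionally cascades category changes to their contents.

# Self-contained sketch of the simplification idea (assumption: flag values and
# the mapping below are illustrative, not the plugin's actual definitions).
from collections import defaultdict
from enum import IntEnum


class SimpleChange(IntEnum):
    created = 1
    updated = 2
    deleted = 4


def simplify(entries):
    """Collapse (object, change) pairs into one bitmask per object."""
    mapping = {'created': SimpleChange.created,
               'data_changed': SimpleChange.updated,
               'deleted': SimpleChange.deleted}
    result = defaultdict(int)
    for obj, change in entries:
        result[obj] |= mapping[change]
    return dict(result)


# Two data changes and a deletion for the same object collapse into one entry:
entries = [('evt1', 'data_changed'), ('evt1', 'data_changed'), ('evt1', 'deleted')]
assert simplify(entries) == {'evt1': SimpleChange.updated | SimpleChange.deleted}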