def migrate_event_series(self):
    """Create an EventSeries for every group of related events.

    Builds the series id-sets via ``get_event_series()``, attaches a fresh
    ``EventSeries`` to each event of every multi-event series, and finally
    soft-deletes the legacy ``partN`` attachment folders that encoded the
    series information in the old data model.
    """
    self.print_step("Migrating event series")
    all_series = self.get_event_series()
    all_series_ids = set(chain.from_iterable(all_series))
    # Load only the columns we actually touch (we just assign the series).
    events = {e.id: e for e in Event.find(Event.id.in_(all_series_ids))
                                    .options(load_only('id', 'series_id'))}
    for series in committing_iterator(verbose_iterator(all_series, len(all_series),
                                                       lambda x: 0, lambda x: '')):
        # Drop ids that have no corresponding Event row.
        series &= events.viewkeys()
        if len(series) < 2:
            # A series containing a single event is meaningless.
            self.print_warning('Skipping single-event series: {}'.format(sorted(series)))
            continue
        es = EventSeries(show_sequence_in_title=False)
        for id_ in series:
            events[id_].series = es
        if not self.quiet:
            self.print_success(repr(series))
    # Raw string: ``\d`` is a regex escape, not a Python string escape
    # (non-raw ``'\d'`` is a deprecated invalid escape sequence).
    AttachmentFolder.find(AttachmentFolder.title.op('~')(r'^part\d+$')) \
        .update({AttachmentFolder.is_deleted: True}, synchronize_session=False)
    db.session.commit()
def get_event_series(self):
    """Return a list of disjoint sets of event ids, one set per series.

    Uses the legacy event mapping and the old ``partN`` attachment folders
    to discover which events reference each other, then unions reachable
    ids into canonical series sets.  ``series_map`` maps every seen event
    id to the canonical (shared) set object stored in ``series_list``.
    """
    self.legacy_event_mapping = {x.legacy_event_id: x.event_id for x in LegacyEventMapping.query}
    self.attachment_folders = defaultdict(set)
    # Raw string: ``\d`` is a regex escape, not a Python string escape.
    folder_query = (AttachmentFolder
                    .find(AttachmentFolder.linked_event_id.in_({e.id for e in self._events_query}))
                    .filter(AttachmentFolder.title.op('~')(r'^part\d+$'))
                    .options(joinedload('attachments')))
    for af in folder_query:
        self.attachment_folders[af.linked_event_id].add(af)
    series_list = []
    series_map = {}  # event id -> canonical series set (shared object)
    for event in self._events_query:
        series_ids = {event.id} | set(self._extract_event_ids(event))
        # Canonical sets already registered for any of the reachable ids.
        series = filter(None, (series_map.get(id_) for id_ in series_ids))
        if not series:
            # None of the ids seen before: start a brand-new series.
            series_list.append(series_ids)
            for id_ in series_ids:
                series_map[id_] = series_ids
        else:
            # All mapped ids must already share one canonical set.
            assert len(set(map(frozenset, series))) == 1
            if series[0] != series_ids:
                self.print_warning('Inconsistent series found; merging them', event_id=event.id)
                self.print_warning('Series IDs: {}'.format(sorted(series[0])), event_id=event.id)
                self.print_warning('Reachable IDs: {}'.format(sorted(series_ids)), event_id=event.id)
                series[0] |= series_ids
            # BUG FIX: map the ids to the canonical set ``series[0]`` (the one
            # stored in ``series_list``), not to the local ``series_ids`` set;
            # mapping to the local set would make later merges update a stale
            # set and silently drop ids from the returned series.
            for id_ in series_ids:
                series_map[id_] = series[0]
    return series_list
def get_event_series(self):
    """Return a list of disjoint sets of event ids, one set per series.

    Uses the legacy event mapping and the old ``partN`` attachment folders
    to discover which events reference each other, then unions reachable
    ids into canonical series sets.  ``series_map`` maps every seen event
    id to the canonical (shared) set object stored in ``series_list``.
    """
    self.legacy_event_mapping = {x.legacy_event_id: x.event_id for x in LegacyEventMapping.query}
    self.attachment_folders = defaultdict(set)
    # Raw string: ``\d`` is a regex escape, not a Python string escape.
    folder_query = (AttachmentFolder
                    .find(AttachmentFolder.linked_event_id.in_({e.id for e in self._events_query}))
                    .filter(AttachmentFolder.title.op('~')(r'^part\d+$'))
                    .options(joinedload('attachments')))
    for af in folder_query:
        self.attachment_folders[af.linked_event_id].add(af)
    series_list = []
    series_map = {}  # event id -> canonical series set (shared object)
    for event in self._events_query:
        series_ids = {event.id} | set(self._extract_event_ids(event))
        # Canonical sets already registered for any of the reachable ids.
        series = filter(None, (series_map.get(id_) for id_ in series_ids))
        if not series:
            # None of the ids seen before: start a brand-new series.
            series_list.append(series_ids)
            for id_ in series_ids:
                series_map[id_] = series_ids
        else:
            # All mapped ids must already share one canonical set.
            assert len(set(map(frozenset, series))) == 1
            if series[0] != series_ids:
                self.print_warning('Inconsistent series found; merging them', event_id=event.id)
                self.print_warning('Series IDs: {}'.format(sorted(series[0])), event_id=event.id)
                self.print_warning('Reachable IDs: {}'.format(sorted(series_ids)), event_id=event.id)
                series[0] |= series_ids
            # BUG FIX: map the ids to the canonical set ``series[0]`` (the one
            # stored in ``series_list``), not to the local ``series_ids`` set;
            # mapping to the local set would make later merges update a stale
            # set and silently drop ids from the returned series.
            for id_ in series_ids:
                series_map[id_] = series[0]
    return series_list
def migrate_event_series(self):
    """Create an EventSeries for every group of related events.

    Builds the series id-sets via ``get_event_series()``, attaches a fresh
    ``EventSeries`` to each event of every multi-event series, and finally
    soft-deletes the legacy ``partN`` attachment folders that encoded the
    series information in the old data model.
    """
    self.print_step("Migrating event series")
    all_series = self.get_event_series()
    all_series_ids = set(chain.from_iterable(all_series))
    # Load only the columns we actually touch (we just assign the series).
    events = {e.id: e for e in Event.find(Event.id.in_(all_series_ids))
                                    .options(load_only('id', 'series_id'))}
    for series in committing_iterator(verbose_iterator(all_series, len(all_series),
                                                       lambda x: 0, lambda x: '')):
        # Drop ids that have no corresponding Event row.
        series &= events.viewkeys()
        if len(series) < 2:
            # A series containing a single event is meaningless.
            self.print_warning('Skipping single-event series: {}'.format(sorted(series)))
            continue
        es = EventSeries(show_sequence_in_title=False)
        for id_ in series:
            events[id_].series = es
        if not self.quiet:
            self.print_success(repr(series))
    # Raw string: ``\d`` is a regex escape, not a Python string escape
    # (non-raw ``'\d'`` is a deprecated invalid escape sequence).
    AttachmentFolder.find(AttachmentFolder.title.op('~')(r'^part\d+$')) \
        .update({AttachmentFolder.is_deleted: True}, synchronize_session=False)
    db.session.commit()