def event(self, idlist):
    """Serialize the events in *idlist* that fall in the requested window.

    Only events passing the local filter and accessible to the current
    user are included in the serialized output.
    """
    criteria = [Event.id.in_(idlist),
                ~Event.is_deleted,
                Event.happens_between(self._fromDT, self._toDT)]
    query = Event.find(*criteria).options(*self._get_query_options(self._detail_level))
    query = self._update_query(query)
    accessible = (evt for evt in query
                  if self._filter_event(evt) and evt.can_access(self.user))
    return self.serialize_events(accessible)
def category_extra(self, ids):
    """Return category path data plus a future-events flag.

    ``moreFutureEvents`` is ``False`` when no upper bound (``_toDT``)
    was requested, since "future" is only meaningful relative to it.
    """
    future = False
    if self._toDT is not None:
        # any non-deleted event in these categories starting after the window?
        future = (Event.find(Event.category_id.in_(ids),
                             ~Event.is_deleted,
                             Event.start_dt > self._toDT)
                  .has_rows())
    return {"eventCategories": self._build_category_path_data(ids),
            "moreFutureEvents": future}
def category_extra(self, ids):
    """Return category path data plus a future-events flag.

    :param ids: Category IDs to check.
    :return: dict with ``eventCategories`` (path data for *ids*) and
             ``moreFutureEvents`` (whether any non-deleted event in
             those categories starts after ``self._toDT``).
    """
    if self._toDT is None:
        # no upper bound requested -> "future" is undefined, report none
        has_future_events = False
    else:
        query = Event.find(Event.category_id.in_(ids), ~Event.is_deleted, Event.start_dt > self._toDT)
        # Use the query's existence helper instead of fetching a row tuple
        # via `db.session.query(query.exists()).one()[0]`; this matches the
        # sibling implementations of this method.
        has_future_events = query.has_rows()
    return {
        'eventCategories': self._build_category_path_data(ids),
        'moreFutureEvents': has_future_events
    }
def category_extra(self, ids):
    """Return category path info and whether later events exist."""
    if self._toDT is None:
        more_future = False
    else:
        upcoming = Event.find(Event.category_id.in_(ids),
                              ~Event.is_deleted,
                              Event.start_dt > self._toDT)
        more_future = upcoming.has_rows()
    result = {'eventCategories': self._build_category_path_data(ids),
              'moreFutureEvents': more_future}
    return result
def event(self, idlist):
    """Serialize the requested events after validating their IDs.

    :param idlist: iterable of event IDs (numeric strings or ints)
    :raises HTTPAPIError: with status 400 if any ID is not numeric
    """
    try:
        event_ids = [int(id_) for id_ in idlist]
    except ValueError:
        raise HTTPAPIError('Event IDs must be numeric', 400)
    query = Event.find(Event.id.in_(event_ids),
                       ~Event.is_deleted,
                       Event.happens_between(self._fromDT, self._toDT))
    query = query.options(*self._get_query_options(self._detail_level))
    query = self._update_query(query)
    return self.serialize_events(evt for evt in query
                                 if self._filter_event(evt) and evt.can_access(self.user))
def _iter_events(self):
    """Yield ``(zodb_conference, db_event)`` pairs for all live events.

    Conferences whose ID has no matching DB row are reported and skipped.
    """
    conferences = self.zodb_root['conferences'].itervalues()
    total = Event.query.count()
    db_events = {ev.id: ev
                 for ev in Event.find(is_deleted=False).order_by(Event.id)}
    if self.quiet:
        # NOTE(review): progress iterator only in quiet mode — presumably
        # verbose per-event prints replace it otherwise; confirm with callers
        conferences = verbose_iterator(conferences, total, attrgetter('id'), lambda x: '')
    for conf in self.flushing_iterator(conferences):
        db_event = db_events.get(int(conf.id))
        if db_event is None:
            self.print_error(cformat('%{red!}Event not found in DB'), event_id=conf.id)
            continue
        yield conf, db_event
def migrate_event_managers(self):
    """Migrate legacy ZODB event ACLs into DB principal entries.

    For every event this processes the creator (also recorded for a bulk
    ``creator_id`` update), managers, e-mail based managers, registrars
    and (pending) submitters.  Events still lacking a creator afterwards
    are assigned the janitor user.
    """
    self.print_step("migrating event managers/creators")
    creator_updates = []
    # commit in batches of 5000 events to keep transactions bounded
    for event in committing_iterator(self._iter_events(), 5000):
        self.print_success("", event_id=event.id)
        ac = event._Conference__ac
        # principal entries collected for this event, deduplicated by key
        entries = {}
        # add creator as a manager
        try:
            creator = event._Conference__creator
        except AttributeError:
            # events created after the removal of the `self.__creator` assignment
            # should happen only on dev machines
            self.print_error(cformat("%{red!}Event has no creator attribute"), event_id=event.id)
        else:
            user = self.process_principal(event, entries, creator, "Creator", "green!", full_access=True)
            if user:
                # remember the mapping for the bulk UPDATE below
                creator_updates.append({"event_id": int(event.id), "creator_id": user.id})
        # add managers
        for manager in ac.managers:
            self.process_principal(event, entries, manager, "Manager", "blue!", full_access=True)
        # add email-based managers
        emails = getattr(ac, "managersEmail", [])
        self.process_emails(event, entries, emails, "Manager", "green", full_access=True)
        # add registrars
        for registrar in getattr(event, "_Conference__registrars", []):
            self.process_principal(event, entries, registrar, "Registrar", "cyan", roles={"registration"})
        # add submitters
        for submitter in getattr(ac, "submitters", []):
            self.process_principal(event, entries, submitter, "Submitter", "magenta!", roles={"submit"})
        # email-based (pending) submitters
        pqm = getattr(event, "_pendingQueuesMgr", None)
        if pqm is not None:
            emails = set(getattr(pqm, "_pendingConfSubmitters", []))
            self.process_emails(event, entries, emails, "Submitter", "magenta", roles={"submit"})
        db.session.add_all(entries.itervalues())
    # assign creators
    if creator_updates:
        self.print_step("saving event creators")
        # bulk UPDATE ... SET creator_id = :creator_id WHERE id = :event_id
        stmt = (
            Event.__table__.update()
            .where(Event.id == db.bindparam("event_id"))
            .values(creator_id=db.bindparam("creator_id"))
        )
        db.session.execute(stmt, creator_updates)
    # fall back to the janitor user for any event still without a creator
    updated = Event.find(Event.creator_id == None).update({Event.creator_id: self.janitor.id})  # noqa
    db.session.commit()
    self.print_success("Set the janitor user {} for {} events".format(self.janitor, updated), always=True)
def migrate_event_managers(self):
    """Migrate legacy ZODB event ACLs into DB principal entries.

    For every event this processes the creator (also recorded for a bulk
    ``creator_id`` update), managers, e-mail based managers, registrars
    and (pending) submitters.  Events still lacking a creator afterwards
    are assigned the janitor user.
    """
    self.print_step('migrating event managers/creators')
    creator_updates = []
    # commit in batches of 5000 events to keep transactions bounded
    for event in committing_iterator(self._iter_events(), 5000):
        self.print_success('', event_id=event.id)
        ac = event._Conference__ac
        # principal entries collected for this event, deduplicated by key
        entries = {}
        # add creator as a manager
        try:
            creator = event._Conference__creator
        except AttributeError:
            # events created after the removal of the `self.__creator` assignment
            # should happen only on dev machines
            self.print_error(cformat('%{red!}Event has no creator attribute'), event_id=event.id)
        else:
            user = self.process_principal(event, entries, creator, 'Creator', 'green!', full_access=True)
            if user:
                # remember the mapping for the bulk UPDATE below
                creator_updates.append({'event_id': int(event.id), 'creator_id': user.id})
        # add managers
        for manager in ac.managers:
            self.process_principal(event, entries, manager, 'Manager', 'blue!', full_access=True)
        # add email-based managers
        emails = getattr(ac, 'managersEmail', [])
        self.process_emails(event, entries, emails, 'Manager', 'green', full_access=True)
        # add registrars
        for registrar in getattr(event, '_Conference__registrars', []):
            self.process_principal(event, entries, registrar, 'Registrar', 'cyan', roles={'registration'})
        # add submitters
        for submitter in getattr(ac, 'submitters', []):
            self.process_principal(event, entries, submitter, 'Submitter', 'magenta!', roles={'submit'})
        # email-based (pending) submitters
        pqm = getattr(event, '_pendingQueuesMgr', None)
        if pqm is not None:
            emails = set(getattr(pqm, '_pendingConfSubmitters', []))
            self.process_emails(event, entries, emails, 'Submitter', 'magenta', roles={'submit'})
        db.session.add_all(entries.itervalues())
    # assign creators
    if creator_updates:
        self.print_step('saving event creators')
        # bulk UPDATE ... SET creator_id = :creator_id WHERE id = :event_id
        stmt = (Event.__table__.update()
                .where(Event.id == db.bindparam('event_id'))
                .values(creator_id=db.bindparam('creator_id')))
        db.session.execute(stmt, creator_updates)
    # fall back to the janitor user for any event still without a creator
    updated = Event.find(Event.creator_id == None).update({Event.creator_id: self.janitor.id})  # noqa
    db.session.commit()
    self.print_success('Set the janitor user {} for {} events'.format(self.janitor, updated), always=True)
def _iterate_objs(query_string):
    """Yield accessible events whose title matches *query_string*.

    Results are ordered according to ``self._orderBy`` and windowed by
    ``self._offset`` / ``self._limit`` (both optional).  Access checks
    are applied per event, so the window counts only events the current
    user may see.
    """
    query = Event.find(Event.title_matches(to_unicode(query_string)), ~Event.is_deleted)
    if self._orderBy == 'start':
        query = query.order_by(Event.start_dt)
    elif self._orderBy == 'id':
        query = query.order_by(Event.id)
    # Treat a missing offset as 0 so the limit check below cannot fail
    # with a `None + int` TypeError when only a limit is given.
    offset = self._offset or 0
    # hoist the (loop-invariant) user lookup out of the scan
    avatar = self._aw.getUser()
    user = avatar.user if avatar else None
    counter = 0
    # Query the DB in chunks of 1000 records per query until the limit is satisfied
    for event in query.yield_per(1000):
        if not event.can_access(user):
            continue
        counter += 1
        # Start yielding only when the counter passes the given offset
        if counter > offset:
            yield event
            # Stop querying the DB when the limit is satisfied
            if self._limit is not None and counter == offset + self._limit:
                break
def migrate(self):
    """Group related events into shared :class:`EventSeries` objects.

    Events belonging to each series (as determined by
    ``get_event_series``) are linked to one series object; series with
    fewer than two still-existing events are skipped.  Finally the
    legacy ``partN`` attachment folders are soft-deleted.
    """
    all_series = self.get_event_series()
    all_series_ids = set(chain.from_iterable(all_series))
    events = {e.id: e
              for e in Event.find(Event.id.in_(all_series_ids)).options(load_only('id', 'series_id'))}
    for series in all_series:
        # drop IDs that have no corresponding event row
        series &= set(events)
        if len(series) < 2:
            self.print_warning('Skipping single-event series: {}'.format(sorted(series)))
            continue
        es = EventSeries(show_sequence_in_title=False)
        for id_ in series:
            events[id_].series = es
        if not self.quiet:
            self.print_success(repr(series))
    # soft-delete the legacy `partN` folders; raw string keeps `\d` from
    # being parsed as a (deprecated) string escape sequence
    (AttachmentFolder.query
     .filter(AttachmentFolder.title.op('~')(r'^part\d+$'))
     .update({AttachmentFolder.is_deleted: True}, synchronize_session=False))
    db.session.commit()
def _iter_events(self):
    """Yield ``(zodb_conference, db_event)`` pairs for all live events.

    Supports parallel operation: when ``self.parallel`` is ``(n, i)``,
    only events whose ID is congruent to ``i`` modulo ``n`` are handled
    by this worker.  Conferences with no matching DB row are reported
    and skipped.
    """
    event_it = self.zodb_root['conferences'].itervalues()
    events_query = Event.find(is_deleted=False).order_by(Event.id)
    event_total = len(self.zodb_root['conferences'])
    if self.parallel:
        # n workers, this one handles the i-th slice (id % n == i)
        n, i = self.parallel
        event_it = (e for e in event_it if int(e.id) % n == i)
        # NOTE(review): on Python 2 `event_total / n` is integer floor
        # division, so this is effectively floor, not ceil — confirm intent
        event_total = int(ceil(event_total / n))
        events_query = events_query.filter(Event.id % n == i)
    all_events = {ev.id: ev for ev in events_query}
    if self.quiet:
        # NOTE(review): progress iterator only in quiet mode — presumably
        # verbose per-event prints replace it otherwise
        event_it = verbose_iterator(event_it, event_total, attrgetter('id'), attrgetter('title'))
    for conf in self.flushing_iterator(event_it):
        event = all_events.get(int(conf.id))
        if event is None:
            self.print_error(cformat('%{red!}Event not found in DB'), event_id=conf.id)
            continue
        yield conf, event
def preload_events(ids, lightweight=True, persons=False):
    """Warm SA's identity cache with the given events.

    This is useful for legacy pages where we have to show large numbers
    of events without being able to query them from the db cleanly.

    :param ids: An iterable of IDs or Conference objects
    :param lightweight: Only load dates and title
    :param persons: Also load the person links
    """
    cache = g.setdefault('_event_cache', {})
    wanted = {int(getattr(id_, 'id', id_)) for id_ in ids}
    missing = wanted - cache.viewkeys()
    query = Event.find(Event.id.in_(missing))
    options = []
    if lightweight:
        options.append(load_only('id', 'title', 'start_dt', 'end_dt', 'timezone'))
    if persons:
        options.append(joinedload('person_links'))
    if options:
        query = query.options(*options)
    cache.update({e.id: e for e in query})
def preload_events(ids, lightweight=True, persons=False):
    """Ensure the given events are present in SA's identity cache.

    This is useful for legacy pages where we have to show large numbers
    of events without being able to query them from the db cleanly.

    :param ids: An iterable of IDs or Conference objects
    :param lightweight: Only load dates and title
    :param persons: Also load the person links
    """
    cache = g.setdefault('_event_cache', {})
    # accept raw IDs or objects with an `id` attribute; skip cached ones
    to_load = {int(getattr(item, 'id', item)) for item in ids} - cache.viewkeys()
    query = Event.find(Event.id.in_(to_load))
    if lightweight:
        query = query.options(load_only('id', 'title', 'start_dt', 'end_dt', 'timezone'))
    if persons:
        query = query.options(joinedload('person_links'))
    for event in query:
        cache[event.id] = event
def _iterate_objs(query_string):
    """Yield accessible events whose title matches *query_string*.

    Results are ordered according to ``self._orderBy`` and windowed by
    ``self._offset`` / ``self._limit`` (both optional).  Access checks
    are applied per event, so the window counts only events the current
    user may see.
    """
    query = Event.find(Event.title_matches(to_unicode(query_string)), ~Event.is_deleted)
    if self._orderBy == 'start':
        query = query.order_by(Event.start_dt)
    elif self._orderBy == 'id':
        query = query.order_by(Event.id)
    # Treat a missing offset as 0 so the limit check below cannot fail
    # with a `None + int` TypeError when only a limit is given.
    offset = self._offset or 0
    # hoist the (loop-invariant) user lookup out of the scan
    avatar = self._aw.getUser()
    user = avatar.user if avatar else None
    counter = 0
    # Query the DB in chunks of 1000 records per query until the limit is satisfied
    for event in query.yield_per(1000):
        if not event.can_access(user):
            continue
        counter += 1
        # Start yielding only when the counter passes the given offset
        if counter > offset:
            yield event
            # Stop querying the DB when the limit is satisfied
            if self._limit is not None and counter == offset + self._limit:
                break
def _events_query(self):
    """Return a query for events having any legacy ``partN`` folder.

    Uses a raw string for the regex so ``\\d`` is not treated as a
    (deprecated) string escape sequence.
    """
    return Event.find(
        Event.attachment_folders.any(
            AttachmentFolder.title.op('~')(r'^part\d+$')))