def migrate_layout_settings(self):
    # Migration step (Python 2): copy legacy layout settings, event logos and
    # custom stylesheets from the ZODB into the new settings storage.
    print cformat('%{white!}migrating layout settings, event logos and custom stylesheets')
    # legacy per-event-type default stylesheet mapping, used to skip defaults below
    default_styles = self.zodb_root['MaKaCInfo']['main']._styleMgr._defaultEventStylesheet
    for event, event_type, dmgr, logo, custom_css in committing_iterator(self._iter_event_layout_data()):
        if event_type != 'conference':
            # non-conference events only carry a timetable theme override
            theme = dmgr._defaultstyle
            # nothing to migrate if unset or identical to the type's default
            if not theme or theme == default_styles[event_type]:
                continue
            layout_settings.set(event, 'timetable_theme', theme)
            if not self.quiet:
                self.print_success(cformat('- %{cyan}Default timetable theme: {}').format(theme),
                                   event_id=event.id)
            continue
        # conferences: migrate the full layout settings dict
        settings = self._get_event_settings(event, dmgr)
        layout_settings.set_multi(event, settings)
        if not self.quiet:
            self.print_success(cformat('- %{cyan}Layout settings'), event_id=event.id)
        if logo or custom_css:
            # logo/CSS files need the SQLAlchemy event object to attach to
            sa_event = Event.get(event.id)
            if not sa_event:
                self.print_warning('Event does not exist (anymore)! Logo and/or CSS file not saved!',
                                   event_id=event.id)
                continue
            if logo:
                self._process_logo(logo, sa_event)
            if custom_css:
                self._process_css(custom_css, sa_event)
def _process_args(self):
    """Load the event and the requested registration, 404ing when either is missing."""
    self.event = Event.find(id=request.view_args['event_id'], is_deleted=False).first_or_404()
    registrant_id = request.view_args['registrant_id']
    registration_query = self.event.registrations.filter_by(id=registrant_id, is_deleted=False)
    registration_query = registration_query.options(joinedload('data').joinedload('field_data'))
    self._registration = registration_query.first_or_404()
def _query_events(categ_ids, day_start, day_end):
    """Query (event id, timetable entry start) pairs for events in the given
    categories that either have timetable entries in the interval or happen
    within it."""
    event_alias = db.aliased(Event)

    def dates_overlap(t):
        return (t.start_dt >= day_start) & (t.start_dt <= day_end)

    in_interval = ((Event.timetable_entries.any(dates_overlap(TimetableEntry))) |
                   (Event.query.exists().where(Event.happens_between(day_start, day_end) &
                                               (Event.id == event_alias.id))))
    query = db.session.query(Event.id, TimetableEntry.start_dt)
    query = query.filter(Event.category_chain_overlaps(categ_ids), ~Event.is_deleted, in_interval)
    query = query.group_by(Event.id, TimetableEntry.start_dt)
    query = query.order_by(Event.id, TimetableEntry.start_dt)
    # outer join so events without any overlapping timetable entry still show up (NULL start_dt)
    return query.join(TimetableEntry,
                      (TimetableEntry.event_id == Event.id) & dates_overlap(TimetableEntry),
                      isouter=True)
def _checkParams(self):
    """Extract the user-search parameters from the request payload."""
    SearchBase._checkParams(self)
    params = self._params
    self._surName = params.get("surName", "")
    self._name = params.get("name", "")
    self._organisation = params.get("organisation", "")
    self._email = sanitize_email(params.get("email", ""))
    self._exactMatch = params.get("exactMatch", False)
    self._confId = params.get("conferenceId", None)
    if self._confId:
        self._event = Event.get(self._confId, is_deleted=False)
    else:
        self._event = None
def get_not_deletable_templates(obj):
    """Get all non-deletable templates for an event/category.

    A template cannot be deleted when it is a system template, when it is used
    as the backside of another template, or when it is the ticket template of
    a registration form belonging to an event that has not ended yet.

    :param obj: the event/category owning the templates
    :return: a `set` of `DesignerTemplate` objects
    """
    not_deletable_criteria = [
        DesignerTemplate.is_system_template,
        # `.isnot(None)` is the idiomatic SQLAlchemy way to emit IS NOT NULL;
        # unlike `!= None` it does not need a `# noqa` for E711
        DesignerTemplate.backside_template_of.isnot(None),
        DesignerTemplate.ticket_for_regforms.any(RegistrationForm.event.has(Event.ends_after(now_utc())))
    ]
    return set(DesignerTemplate.query.filter(DesignerTemplate.owner == obj, db.or_(*not_deletable_criteria)))
def _get_upcoming_event(self):
    """Return the next upcoming event in this category the user may access, if any."""
    upcoming_query = (Event.query
                      .filter(Event.is_visible_in(self.category),
                              Event.start_dt > now_utc(),
                              ~Event.is_deleted)
                      .options(subqueryload('acl_entries'))
                      .order_by(Event.start_dt, Event.id))
    matches = get_n_matching(upcoming_query, 1, lambda event: event.can_access(session.user))
    if not matches:
        return None
    return matches[0]
def get_room_events(room, start_dt, end_dt, repeat_frequency, repeat_interval):
    """Return events in the given room that overlap an occurrence of the
    given booking series and are fully manageable by the current user."""
    occurrences = ReservationOccurrence.create_series(start_dt, end_dt, (repeat_frequency, repeat_interval))
    excluded_categories = rb_settings.get('excluded_categories')
    overlaps_occurrence = db.or_(Event.happens_between(as_utc(occ.start_dt), as_utc(occ.end_dt))
                                 for occ in occurrences)
    outside_excluded = db.and_(Event.category_id != cat['id'] for cat in excluded_categories)
    user_has_full_access = Event.acl_entries.any(db.and_(EventPrincipal.type == PrincipalType.user,
                                                         EventPrincipal.user_id == session.user.id,
                                                         EventPrincipal.full_access))
    return (Event.query
            .filter(~Event.is_deleted,
                    Event.own_room == room,
                    overlaps_occurrence,
                    Event.timezone == config.DEFAULT_TIMEZONE,
                    outside_excluded,
                    user_has_full_access)
            .all())
def _category_moved(category, old_parent, new_parent, **kwargs): events = Event.find(Event.category_chain.contains([int(category.id)])).all() # update the category chain of all events from the moved category for event in events: event.category_chain = map(int, reversed(event.category.getCategoryPath())) # update the category chain of all events from the target category for event in g.get('detached_events_moved', set()): # the event was in the target category of of the category move assert event.category_id == int(new_parent.id) # and it is now in the category that has just been moved assert int(event.as_legacy.getOwner().id) == int(category.id) # this will also update the chain (sqlalchemy hook) event.category_id = int(category.id)
def _process(self):
    # Full page render for normal requests; JSON payload for AJAX calendar updates.
    if not request.is_xhr:
        return WPCategory.render_template('display/calendar.html', self.category,
                                          start_dt=request.args.get('start_dt'))
    tz = self.category.display_tzinfo
    # interpret the requested range in the category's display timezone, then
    # convert to UTC for querying
    start = tz.localize(dateutil.parser.parse(request.args['start'])).astimezone(utc)
    end = tz.localize(dateutil.parser.parse(request.args['end'])).astimezone(utc)
    query = (Event.query
             .filter(Event.starts_between(start, end),
                     Event.is_visible_in(self.category),
                     ~Event.is_deleted)
             .options(load_only('id', 'title', 'start_dt', 'end_dt', 'category_id')))
    events = self._get_event_data(query)
    # events spanning the entire requested interval (started before, end after)
    ongoing_events = (Event.query
                      .filter(Event.is_visible_in(self.category),
                              Event.start_dt < start,
                              Event.end_dt > end)
                      .options(load_only('id', 'title', 'start_dt', 'end_dt', 'timezone'))
                      .order_by(Event.title)
                      .all())
    return jsonify_data(flash=False,
                        events=events,
                        ongoing_event_count=len(ongoing_events),
                        ongoing_events_html=self._render_ongoing_events(ongoing_events))
def _process_args(self):
    """Resolve the target object from the request payload.

    Exactly one of the known id keys is expected; raises BadRequest when
    none is present or the lookup fails.
    """
    data = request.json
    self.object = None
    # checked in this order; the first key present wins
    dispatch = (('categId', Category),
                ('contribId', Contribution),
                ('sessionId', Session),
                ('confId', Event))
    for key, model in dispatch:
        if key in data:
            self.object = model.get_one(data[key])
            break
    if self.object is None:
        raise BadRequest
def add_contrib_data():
    # NOTE: closure — reads `user`, `dt` and mutates `data` from the enclosing scope.
    # user is linked to at least one non-deleted contribution
    has_contrib = (EventPerson.contribution_links.any(
        ContributionPersonLink.contribution.has(~Contribution.is_deleted)))
    # ...or to a non-deleted subcontribution of a non-deleted contribution
    has_subcontrib = EventPerson.subcontribution_links.any(
        SubContributionPersonLink.subcontribution.has(db.and_(
            ~SubContribution.is_deleted,
            SubContribution.contribution.has(~Contribution.is_deleted))))
    query = (Event.query
             .options(load_only('id'))
             .options(noload('*'))
             .filter(~Event.is_deleted,
                     Event.ends_after(dt),
                     Event.persons.any((EventPerson.user_id == user.id) & (has_contrib | has_subcontrib))))
    for event in query:
        data[event.id].add('contributor')
def get_events_registered(user, dt=None):
    """Get the IDs of events where the user is registered.

    :param user: A `User`
    :param dt: Only include events taking place on/after that date
    :return: A set of event ids
    """
    registrations = (user.registrations
                     .options(load_only('event_id'),
                              joinedload(Registration.registration_form).load_only('event_id'))
                     .join(Registration.registration_form)
                     .join(RegistrationForm.event)
                     .filter(Registration.is_active,
                             ~RegistrationForm.is_deleted,
                             ~Event.is_deleted,
                             Event.ends_after(dt)))
    return {reg.event_id for reg in registrations}
def _process_cascaded_category_contents(records):
    """
    Travel from categories to subcontributions, flattening the whole event structure.

    Yields everything that it finds (except for elements whose protection has changed
    but are not inheriting their protection settings from anywhere).

    :param records: queue records to process
    """
    category_prot_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.protection_changed}
    category_move_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.moved}
    changed_events = set()

    category_prot_records -= category_move_records  # A move already implies sending the whole record

    # Protection changes are handled differently, as there may not be the need to re-generate the record
    if category_prot_records:
        for categ in Category.find(Category.id.in_(category_prot_records)):
            cte = categ.get_protection_parent_cte()
            # Update only children that inherit
            inheriting_categ_children = (Event.query
                                         .join(cte, db.and_((Event.category_id == cte.c.id),
                                                            (cte.c.protection_parent == categ.id))))
            inheriting_direct_children = Event.find((Event.category_id == categ.id) & Event.is_inheriting)
            changed_events.update(itertools.chain(inheriting_direct_children, inheriting_categ_children))

    # Add move operations and explicitly-passed event records
    if category_move_records:
        changed_events.update(Event.find(Event.category_chain_overlaps(category_move_records)))

    for elem in _process_cascaded_event_contents(records, additional_events=changed_events):
        yield elem
def add_acl_data():
    # NOTE: closure — reads `user`, `dt` and mutates `data` from the enclosing scope.
    # contribution-level ACL entries of the user in non-deleted contributions/events
    query = (user.in_contribution_acls
             .options(load_only('contribution_id', 'roles', 'full_access', 'read_access'))
             .options(noload('*'))
             .options(contains_eager(ContributionPrincipal.contribution).load_only('event_id'))
             .join(Contribution)
             .join(Event, Event.id == Contribution.event_id)
             .filter(~Contribution.is_deleted, ~Event.is_deleted, Event.ends_after(dt)))
    for principal in query:
        roles = data[principal.contribution.event_id]
        if 'submit' in principal.roles:
            roles.add('contribution_submission')
        if principal.full_access:
            roles.add('contribution_manager')
        if principal.read_access:
            roles.add('contribution_access')
def get_events_registered(user, from_dt=None, to_dt=None):
    """Get the IDs of events where the user is registered.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    registrations = (user.registrations
                     .options(load_only('event_id'),
                              joinedload(Registration.registration_form).load_only('event_id'))
                     .join(Registration.registration_form)
                     .join(RegistrationForm.event_new)
                     .filter(Registration.is_active,
                             ~RegistrationForm.is_deleted,
                             ~Event.is_deleted,
                             Event.starts_between(from_dt, to_dt)))
    return {reg.event_id for reg in registrations}
def _process_cascaded_event_contents(records, additional_events=None):
    """
    Flatten a series of records into its most basic elements (subcontribution level).

    Yields results.

    :param records: queue records to process
    :param additional_events: events whose content will be included in addition to those found in records
    """
    changed_events = additional_events or set()
    changed_contributions = set()
    changed_subcontributions = set()

    session_records = {rec.session_id for rec in records if rec.type == EntryType.session}
    contribution_records = {rec.contrib_id for rec in records if rec.type == EntryType.contribution}
    subcontribution_records = {rec.subcontrib_id for rec in records if rec.type == EntryType.subcontribution}
    event_records = {rec.event_id for rec in records if rec.type == EntryType.event}

    if event_records:
        changed_events.update(Event.find(Event.id.in_(event_records)))

    for event in changed_events:
        yield event

    # Sessions are added (explicitly changed only, since they don't need to be sent anywhere)
    if session_records:
        changed_contributions.update(Contribution
                                     .find(Contribution.session_id.in_(session_records),
                                           ~Contribution.is_deleted))

    # Contributions are added (implicitly + explicitly changed)
    changed_event_ids = {ev.id for ev in changed_events}
    condition = Contribution.event_id.in_(changed_event_ids) & ~Contribution.is_deleted
    if contribution_records:
        condition = db.or_(condition, Contribution.id.in_(contribution_records))
    contrib_query = Contribution.find(condition).options(joinedload('subcontributions'))

    for contribution in contrib_query:
        yield contribution
        changed_subcontributions.update(contribution.subcontributions)

    # Same for subcontributions
    if subcontribution_records:
        changed_subcontributions.update(SubContribution.find(SubContribution.id.in_(subcontribution_records)))

    for subcontrib in changed_subcontributions:
        yield subcontrib
def migrate_event_series(self):
    # Migration step (Python 2): group legacy "partNN" events into EventSeries objects.
    self.print_step("Migrating event series")
    all_series = self.get_event_series()
    all_series_ids = set(chain.from_iterable(all_series))
    # map id -> Event for every event referenced by any series
    events = {e.id: e for e in Event.find(Event.id.in_(all_series_ids)).options(load_only('id', 'series_id'))}
    for series in committing_iterator(verbose_iterator(all_series, len(all_series), lambda x: 0, lambda x: '')):
        # drop ids of events that no longer exist
        series &= events.viewkeys()
        if len(series) < 2:
            self.print_warning('Skipping single-event series: {}'.format(sorted(series)))
            continue
        es = EventSeries(show_sequence_in_title=False)
        for id_ in series:
            events[id_].series = es
        if not self.quiet:
            self.print_success(repr(series))
    # the legacy "partNN" attachment folders are obsolete once series exist
    AttachmentFolder.find(AttachmentFolder.title.op('~')('^part\d+$')).update({AttachmentFolder.is_deleted: True},
                                                                              synchronize_session=False)
    db.session.commit()
def initial_export(agent_id, force):
    """Performs the initial data export for an agent"""
    # CLI command (Python 2): push all existing events to a livesync agent's backend.
    agent = LiveSyncAgent.find_first(id=agent_id)
    if agent is None:
        print 'No such agent'
        return
    if agent.backend is None:
        print cformat('Cannot run agent %{red!}{}%{reset} (backend not found)').format(agent.name)
        return
    print cformat('Selected agent: %{white!}{}%{reset} ({})').format(agent.name, agent.backend.title)
    # guard against accidentally repeating the (expensive) export
    if agent.initial_data_exported and not force:
        print 'The initial export has already been performed for this agent.'
        print cformat('To re-run it, use %{yellow!}--force%{reset}')
        return
    agent.create_backend().run_initial_export(Event.find(is_deleted=False))
    agent.initial_data_exported = True
    db.session.commit()
def get_matching_events(start_dt, end_dt, repeat_frequency, repeat_interval):
    """Get events suitable for booking linking.

    This finds events that overlap with an occurrence of a booking
    with the given dates where the user is a manager.
    """
    occurrences = ReservationOccurrence.create_series(start_dt, end_dt, (repeat_frequency, repeat_interval))
    excluded_categories = rb_settings.get('excluded_categories')
    has_accepted_booking = Event.room_reservation_links.any(
        ReservationLink.reservation.has(Reservation.is_accepted))
    overlaps_occurrence = db.or_(Event.happens_between(as_utc(occ.start_dt), as_utc(occ.end_dt))
                                 for occ in occurrences)
    outside_excluded = db.and_(Event.category_id != cat.id for cat in excluded_categories)
    user_is_manager = Event.acl_entries.any(db.and_(EventPrincipal.type == PrincipalType.user,
                                                    EventPrincipal.user_id == session.user.id,
                                                    EventPrincipal.full_access))
    return (Event.query
            .filter(~Event.is_deleted,
                    ~has_accepted_booking,
                    overlaps_occurrence,
                    Event.timezone == config.DEFAULT_TIMEZONE,
                    outside_excluded,
                    user_is_manager)
            .all())
def _events_query(self):
    """Return a query for events that have at least one legacy ``partNN``
    attachment folder.

    The regex is a raw string so ``\\d`` is passed through to PostgreSQL
    verbatim instead of being treated as a Python string escape (an invalid
    escape sequence that is deprecated and will eventually be an error).
    """
    return Event.find(
        Event.attachment_folders.any(
            AttachmentFolder.title.op('~')(r'^part\d+$')))
def get_category_timetable(categ_ids, start_dt, end_dt, detail_level='event', tz=utc, from_categ=None, grouped=True):
    """Retrieve time blocks that fall within a specific time interval for a given set of categories.

    :param categ_ids: iterable containing list of category IDs
    :param start_dt: start of search interval (``datetime``, expected to be in display timezone)
    :param end_dt: end of search interval (``datetime``, expected to be in display timezone)
    :param detail_level: the level of detail of information (``event|session|contribution``)
    :param tz: the ``timezone`` information should be displayed in
    :param from_categ: ``Category`` that will be taken into account to calculate visibility
    :param grouped: Whether to group results by start date
    :returns: a dictionary containing timetable information in a structured way. See source code for examples.
    """
    day_start = start_dt.astimezone(utc)
    day_end = end_dt.astimezone(utc)
    dates_overlap = lambda t: (t.start_dt >= day_start) & (t.start_dt <= day_end)

    # items[event_id][local_date] -> list of TimetableEntry start datetimes;
    # items[event_id] is None when the event has no timetable entry in range
    items = defaultdict(lambda: defaultdict(list))

    # first of all, query TimetableEntries/events that fall within
    # specified range of dates (and category set)
    events = _query_events(categ_ids, day_start, day_end)
    if from_categ:
        events = events.filter(Event.is_visible_in(from_categ))
    for eid, tt_start_dt in events:
        if tt_start_dt:
            items[eid][tt_start_dt.astimezone(tz).date()].append(tt_start_dt)
        else:
            items[eid] = None

    # then, retrieve detailed information about the events
    event_ids = set(items)
    query = (Event.find(Event.id.in_(event_ids))
             .options(subqueryload(Event.person_links).joinedload(EventPersonLink.person),
                      joinedload(Event.own_room).noload('owner'),
                      joinedload(Event.own_venue),
                      joinedload(Event.category).undefer('effective_icon_data'),
                      undefer('effective_protection_mode')))
    scheduled_events = defaultdict(list)
    ongoing_events = []
    events = []
    for e in query:
        if grouped:
            local_start_dt = e.start_dt.astimezone(tz).date()
            local_end_dt = e.end_dt.astimezone(tz).date()
            if items[e.id] is None:
                # if there is no TimetableEntry, this means the event has no timetable on that interval
                for day in iterdays(max(start_dt.date(), local_start_dt), min(end_dt.date(), local_end_dt)):
                    # if the event starts on this date, we've got a time slot
                    if day.date() == local_start_dt:
                        scheduled_events[day.date()].append((e.start_dt, e))
                    else:
                        ongoing_events.append(e)
            else:
                for start_d, start_dts in items[e.id].viewitems():
                    scheduled_events[start_d].append((start_dts[0], e))
        else:
            events.append(e)

    # result['events'][date(...)] -> [(datetime(....), Event(...))]
    # result[event_id]['contribs'][date(...)] -> [(TimetableEntry(...), Contribution(...))]
    # result['ongoing_events'] = [Event(...)]
    if grouped:
        result = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    else:
        result = defaultdict(lambda: defaultdict(list))
    result.update({
        'events': scheduled_events if grouped else events,
        'ongoing_events': ongoing_events
    })

    # according to detail level, ask for extra information from the DB
    if detail_level != 'event':
        query = _query_blocks(event_ids, dates_overlap, detail_level)
        if grouped:
            for b in query:
                start_date = b.timetable_entry.start_dt.astimezone(tz).date()
                result[b.session.event_id]['blocks'][start_date].append((b.timetable_entry, b))
        else:
            for b in query:
                result[b.session.event_id]['blocks'].append(b)

    if detail_level == 'contribution':
        query = (Contribution.find(Contribution.event_id.in_(event_ids),
                                   dates_overlap(TimetableEntry),
                                   ~Contribution.is_deleted)
                 .options(contains_eager(Contribution.timetable_entry),
                          joinedload(Contribution.person_links))
                 .join(TimetableEntry))
        if grouped:
            for c in query:
                start_date = c.timetable_entry.start_dt.astimezone(tz).date()
                result[c.event_id]['contribs'][start_date].append((c.timetable_entry, c))
        else:
            for c in query:
                result[c.event_id]['contributions'].append(c)

        query = (Break.find(TimetableEntry.event_id.in_(event_ids), dates_overlap(TimetableEntry))
                 .options(contains_eager(Break.timetable_entry))
                 .join(TimetableEntry))
        if grouped:
            for b in query:
                start_date = b.timetable_entry.start_dt.astimezone(tz).date()
                result[b.timetable_entry.event_id]['breaks'][start_date].append((b.timetable_entry, b))
        else:
            for b in query:
                result[b.timetable_entry.event_id]['breaks'].append(b)
    return result
def principal_from_identifier(identifier, allow_groups=False, allow_external_users=False, allow_event_roles=False,
                              allow_category_roles=False, allow_registration_forms=False, allow_emails=False,
                              event_id=None, soft_fail=False):
    # Resolve a "<Type>:<data>" identifier string into the corresponding principal
    # object (User, pending User, GroupProxy, EventRole, CategoryRole,
    # RegistrationForm or EmailPrincipal).  The `allow_*` flags whitelist which
    # principal types are acceptable; with `soft_fail`, soft-deleted users /
    # registration forms and unresolvable groups/category roles are still returned.
    from indico.modules.events.models.events import Event
    from indico.modules.events.models.roles import EventRole
    from indico.modules.categories.models.roles import CategoryRole
    from indico.modules.events.registration.models.forms import RegistrationForm
    from indico.modules.groups import GroupProxy
    from indico.modules.users import User

    try:
        type_, data = identifier.split(':', 1)
    except ValueError:
        raise ValueError('Invalid data')
    if type_ == 'User':
        try:
            user_id = int(data)
        except ValueError:
            raise ValueError('Invalid data')
        user = User.get(user_id, is_deleted=(None if soft_fail else False))
        if user is None:
            raise ValueError('Invalid user: {}'.format(user_id))
        return user
    elif type_ == 'ExternalUser':
        if not allow_external_users:
            raise ValueError('External users are not allowed')
        cache = GenericCache('external-user')
        external_user_data = cache.get(data)
        if not external_user_data:
            raise ValueError('Invalid data')
        # prefer a real user with a matching email over creating a pending one
        user = User.query.filter(User.all_emails == external_user_data['email'], ~User.is_deleted).first()
        if user:
            return user
        # create a pending user. this user isn't sent to the DB unless it gets added
        # to the sqlalchemy session somehow (e.g. by adding it to an ACL).
        # like this processing form data does not result in something being stored in
        # the database, which is good!
        return User(first_name=external_user_data['first_name'], last_name=external_user_data['last_name'],
                    email=external_user_data['email'], affiliation=external_user_data['affiliation'],
                    address=external_user_data['address'], phone=external_user_data['phone'], is_pending=True)
    elif type_ == 'Group':
        if not allow_groups:
            raise ValueError('Groups are not allowed')
        try:
            provider, name = data.split(':', 1)
        except ValueError:
            raise ValueError('Invalid data')
        if not provider:
            # local group
            try:
                group_id = int(name)
            except ValueError:
                raise ValueError('Invalid data')
            group = GroupProxy(group_id)
        else:
            # multipass group
            group = GroupProxy(name, provider)
        if not soft_fail and group.group is None:
            raise ValueError('Invalid group: {}'.format(data))
        return group
    elif type_ == 'EventRole':
        if not allow_event_roles:
            raise ValueError('Event roles are not allowed')
        try:
            event_role_id = int(data)
        except ValueError:
            raise ValueError('Invalid data')
        event_role = EventRole.get(event_role_id)
        # the role must belong to the event we are resolving principals for
        if event_role is None or event_role.event_id != event_id:
            raise ValueError('Invalid event role: {}'.format(event_role_id))
        return event_role
    elif type_ == 'CategoryRole':
        if not allow_category_roles:
            raise ValueError('Category roles are not allowed')
        event = Event.get(event_id)
        if event is None:
            raise ValueError('Invalid event id: {}'.format(event_id))
        try:
            category_role_id = int(data)
        except ValueError:
            raise ValueError('Invalid data')
        if soft_fail:
            # skip the category-chain check when soft-failing
            category_role = CategoryRole.get(category_role_id)
        else:
            category_role = CategoryRole.get_category_role_by_id(event.category, category_role_id)
        if category_role is None:
            raise ValueError('Invalid category role: {}'.format(category_role_id))
        return category_role
    elif type_ == 'RegistrationForm':
        if not allow_registration_forms:
            raise ValueError('Registration forms are not allowed')
        try:
            reg_form_id = int(data)
        except ValueError:
            raise ValueError('Invalid data')
        registration_form = RegistrationForm.get(reg_form_id, is_deleted=(None if soft_fail else False))
        if registration_form is None or registration_form.event_id != event_id:
            raise ValueError('Invalid registration form: {}'.format(reg_form_id))
        return registration_form
    elif type_ == 'Email':
        if not allow_emails:
            raise ValueError('Emails are not allowed')
        return EmailPrincipal(data)
    else:
        raise ValueError('Invalid data')
def _getParams(self):
    """Read the event id from the path and the optional ``flat`` query flag."""
    super(MLZExportBase, self)._getParams()
    event_id = self._pathParams['event']
    self.event_id = event_id
    self.event = Event.get(event_id, is_deleted=False)
    self.flat = get_query_parameter(self._queryParams, ['flat'], False)
def _process_cascaded_category_contents(records):
    """
    Travel from categories to subcontributions, flattening the whole event structure.

    Yields everything that it finds (except for elements whose protection has changed
    but are not inheriting their protection settings from anywhere).

    :param records: queue records to process
    """
    excluded_categories = get_excluded_categories(deep=True)
    # `True` keeps the filter a no-op when there is nothing to exclude
    excluded_categories_filter = Event.category_id.notin_(excluded_categories) if excluded_categories else True

    category_prot_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.protection_changed}
    category_move_records = {rec.category_id for rec in records
                             if rec.type == EntryType.category and rec.change == ChangeType.moved}
    category_publishing_records = {rec.category_id for rec in records
                                   if rec.type == EntryType.category and
                                   rec.change in (ChangeType.published, ChangeType.unpublished)}
    changed_events = set()

    category_prot_records -= category_move_records  # A move already implies sending the whole record
    category_prot_records -= category_publishing_records  # A publish/unpublish already implies sending the whole record

    # Protection changes are handled differently, as there may not be the need to re-generate the record
    if category_prot_records:
        for categ in Category.query.filter(Category.id.in_(category_prot_records)):
            cte = categ.get_protection_parent_cte()
            # Update only children that inherit
            inheriting_categ_children = (Event.query
                                         .filter(~Event.is_deleted, excluded_categories_filter)
                                         .join(cte, db.and_((Event.category_id == cte.c.id),
                                                            (cte.c.protection_parent == categ.id))))
            inheriting_direct_children = Event.query.filter(Event.category_id == categ.id, Event.is_inheriting,
                                                            ~Event.is_deleted, excluded_categories_filter)
            changed_events.update(itertools.chain(inheriting_direct_children, inheriting_categ_children))

    # Add move operations and explicitly-passed event records
    if category_move_records:
        changed_events.update(Event.query.filter(Event.category_chain_overlaps(category_move_records),
                                                 ~Event.is_deleted, excluded_categories_filter))
    if category_publishing_records:
        changed_events.update(Event.query.filter(Event.category_chain_overlaps(category_publishing_records),
                                                 ~Event.is_deleted, excluded_categories_filter))

    yield from _process_cascaded_event_contents(records, additional_events=changed_events)
def _process_args(self):
    """Fetch the event and its registration for the current request (404 on miss)."""
    self.event = Event.find(id=request.view_args['event_id'], is_deleted=False).first_or_404()
    self._registration = (self.event.registrations
                          .filter_by(id=request.view_args['registrant_id'], is_deleted=False)
                          .options(joinedload('data').joinedload('field_data'))
                          .first_or_404())
def _get_category_events_query(start_dt, end_dt, category_ids):
    """Build a query for non-deleted events in the given categories that
    happen within the given period, ordered by start date."""
    criteria = [~Event.is_deleted,
                Event.category_chain_overlaps(category_ids),
                Event.happens_between(start_dt, end_dt)]
    return Event.query.filter(*criteria).order_by(Event.start_dt)
def _process_cascaded_event_contents(records, additional_events=None):
    """
    Flatten a series of records into its most basic elements (subcontribution level).

    Yields results.

    :param records: queue records to process
    :param additional_events: events whose content will be included in addition to those found in records
    """
    changed_events = additional_events or set()
    changed_contributions = set()
    changed_subcontributions = set()

    session_records = {rec.session_id for rec in records if rec.type == EntryType.session}
    contribution_records = {rec.contrib_id for rec in records if rec.type == EntryType.contribution}
    subcontribution_records = {rec.subcontrib_id for rec in records if rec.type == EntryType.subcontribution}
    event_records = {rec.event_id for rec in records if rec.type == EntryType.event}

    if event_records:
        changed_events.update(Event.find(Event.id.in_(event_records)))

    for event in changed_events:
        yield event

    # Sessions are added (explicitly changed only, since they don't need to be sent anywhere)
    if session_records:
        changed_contributions.update(Contribution.find(Contribution.session_id.in_(session_records),
                                                       ~Contribution.is_deleted))

    # Contributions are added (implicitly + explicitly changed)
    changed_event_ids = {ev.id for ev in changed_events}
    condition = Contribution.event_id.in_(changed_event_ids) & ~Contribution.is_deleted
    if contribution_records:
        condition = db.or_(condition, Contribution.id.in_(contribution_records))
    contrib_query = Contribution.find(condition).options(joinedload('subcontributions'))

    for contribution in contrib_query:
        yield contribution
        changed_subcontributions.update(contribution.subcontributions)

    # Same for subcontributions
    if subcontribution_records:
        changed_subcontributions.update(SubContribution.find(SubContribution.id.in_(subcontribution_records)))

    for subcontrib in changed_subcontributions:
        yield subcontrib
except ValueError: raise ValueError('Invalid data') event_role = EventRole.get(event_role_id) if event_role is None or event_role.event_id != event_id: raise ValueError(f'Invalid event role: {event_role_id}') return event_role elif type_ == 'CategoryRole': if not allow_category_roles: raise ValueError('Category roles are not allowed') category = None if category_id is not None: category = Category.get(category_id) if category is None: raise ValueError(f'Invalid category id: {category_id}') elif event_id is not None: event = Event.get(event_id) if event is None: raise ValueError(f'Invalid event id: {event_id}') category = event.category try: category_role_id = int(data) except ValueError: raise ValueError('Invalid data') if soft_fail: category_role = CategoryRole.get(category_role_id) else: category_role = CategoryRole.get_category_role_by_id( category, category_role_id) if category_role is None: raise ValueError(f'Invalid category role: {category_role_id}') return category_role
def _checkParams(self):
    """Resolve the event from the URL, 404ing when missing or deleted."""
    event_id = request.view_args['event_id']
    self.event = Event.find(id=event_id, is_deleted=False).first_or_404()
def _process_args(self):
    """Look up the non-deleted event from the URL args (404 otherwise)."""
    event_query = Event.find(id=request.view_args['event_id'], is_deleted=False)
    self.event = event_query.first_or_404()
def _get_event_path(self, data): if not (event := Event.get(data['event_id'])): return []
def _process_args(self):
    """Store the event/registrant ids from the URL and load the event (may be None)."""
    view_args = request.view_args
    self.event_id = view_args['event_id']
    self.registrant_id = view_args['registrant_id']
    self.event = Event.get(self.event_id, is_deleted=False)
def _events_query(self):
    """Return a query for events having at least one legacy ``partNN``
    attachment folder.

    The regex is a raw string so ``\\d`` reaches the database verbatim rather
    than being parsed as a Python string escape (an invalid escape sequence
    that is deprecated and slated to become an error).
    """
    return Event.find(Event.attachment_folders.any(AttachmentFolder.title.op('~')(r'^part\d+$')))
def get_category_timetable(categ_ids, start_dt, end_dt, detail_level='event', tz=utc, from_categ=None, grouped=True):
    """Retrieve time blocks that fall within a specific time interval for a given set of categories.

    :param categ_ids: iterable containing list of category IDs
    :param start_dt: start of search interval (``datetime``, expected to be in display timezone)
    :param end_dt: end of search interval (``datetime``, expected to be in display timezone)
    :param detail_level: the level of detail of information (``event|session|contribution``)
    :param tz: the ``timezone`` information should be displayed in
    :param from_categ: ``Category`` that will be taken into account to calculate visibility
    :param grouped: Whether to group results by start date
    :returns: a dictionary containing timetable information in a structured way. See source code for examples.
    """
    day_start = start_dt.astimezone(utc)
    day_end = end_dt.astimezone(utc)
    dates_overlap = lambda t: (t.start_dt >= day_start) & (t.start_dt <= day_end)

    # items[event_id][local_date] -> list of TimetableEntry start datetimes;
    # items[event_id] is None when the event has no timetable entry in range
    items = defaultdict(lambda: defaultdict(list))

    # first of all, query TimetableEntries/events that fall within
    # specified range of dates (and category set)
    events = _query_events(categ_ids, day_start, day_end)
    if from_categ:
        events = events.filter(Event.is_visible_in(from_categ))
    for eid, tt_start_dt in events:
        if tt_start_dt:
            items[eid][tt_start_dt.astimezone(tz).date()].append(tt_start_dt)
        else:
            items[eid] = None

    # then, retrieve detailed information about the events
    event_ids = set(items)
    query = (Event.find(Event.id.in_(event_ids))
             .options(subqueryload(Event.person_links).joinedload(EventPersonLink.person),
                      joinedload(Event.own_room).noload('owner'),
                      joinedload(Event.own_venue),
                      joinedload(Event.category),
                      undefer('effective_protection_mode')))
    scheduled_events = defaultdict(list)
    ongoing_events = []
    events = []
    for e in query:
        if grouped:
            local_start_dt = e.start_dt.astimezone(tz).date()
            local_end_dt = e.end_dt.astimezone(tz).date()
            if items[e.id] is None:
                # if there is no TimetableEntry, this means the event has no
                # timetable on that interval
                for day in iterdays(max(start_dt.date(), local_start_dt), min(end_dt.date(), local_end_dt)):
                    # if the event starts on this date, we've got a time slot
                    if day.date() == local_start_dt:
                        scheduled_events[day.date()].append((e.start_dt, e))
                    else:
                        ongoing_events.append(e)
            else:
                for start_d, start_dts in items[e.id].viewitems():
                    scheduled_events[start_d].append((start_dts[0], e))
        else:
            events.append(e)

    # result['events'][date(...)] -> [(datetime(....), Event(...))]
    # result[event_id]['contribs'][date(...)] -> [(TimetableEntry(...), Contribution(...))]
    # result['ongoing_events'] = [Event(...)]
    if grouped:
        result = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    else:
        result = defaultdict(lambda: defaultdict(list))
    result.update({
        'events': scheduled_events if grouped else events,
        'ongoing_events': ongoing_events
    })

    # according to detail level, ask for extra information from the DB
    if detail_level != 'event':
        query = _query_blocks(event_ids, dates_overlap, detail_level)
        if grouped:
            for b in query:
                start_date = b.timetable_entry.start_dt.astimezone(tz).date()
                result[b.session.event_id]['blocks'][start_date].append((b.timetable_entry, b))
        else:
            for b in query:
                result[b.session.event_id]['blocks'].append(b)

    if detail_level == 'contribution':
        query = (Contribution.find(Contribution.event_id.in_(event_ids),
                                   dates_overlap(TimetableEntry),
                                   ~Contribution.is_deleted)
                 .options(contains_eager(Contribution.timetable_entry),
                          joinedload(Contribution.person_links))
                 .join(TimetableEntry))
        if grouped:
            for c in query:
                start_date = c.timetable_entry.start_dt.astimezone(tz).date()
                result[c.event_id]['contribs'][start_date].append((c.timetable_entry, c))
        else:
            for c in query:
                result[c.event_id]['contributions'].append(c)

        query = (Break.find(TimetableEntry.event_id.in_(event_ids), dates_overlap(TimetableEntry))
                 .options(contains_eager(Break.timetable_entry))
                 .join(TimetableEntry))
        if grouped:
            for b in query:
                start_date = b.timetable_entry.start_dt.astimezone(tz).date()
                result[b.timetable_entry.event_id]['breaks'][start_date].append((b.timetable_entry, b))
        else:
            for b in query:
                result[b.timetable_entry.event_id]['breaks'].append(b)
    return result