def create_abstract(event, abstract_data, custom_fields_data=None, send_notifications=False):
    """Create a new abstract submitted by the current session user.

    :param event: The `Event` the abstract belongs to
    :param abstract_data: A dict with the abstract's data; the keys
        ``submitted_for_tracks`` and ``attachments`` are handled separately
        and must not be passed to ``populate_from_dict``
    :param custom_fields_data: A dict containing data for custom fields
    :param send_notifications: Whether to send the abstract notification
        e-mails after creation
    :return: The newly created `Abstract`
    """
    abstract = Abstract(event=event, submitter=session.user)
    track_data = abstract_data.pop('submitted_for_tracks', None)
    file_data = abstract_data.pop('attachments', None)
    abstract.populate_from_dict(abstract_data)
    if track_data is not None:
        _update_tracks(abstract, track_data)
    if custom_fields_data:
        set_custom_fields(abstract, custom_fields_data)
    # flush so the abstract has an id before files are attached and signals fire
    db.session.flush()
    if file_data:
        add_abstract_files(abstract, file_data['added'], log_action=False)
    signals.event.abstract_created.send(abstract)
    if send_notifications:
        send_abstract_notifications(abstract)
    logger.info('Abstract %s created by %s', abstract, session.user)
    abstract.event.log(EventLogRealm.reviewing, EventLogKind.positive, 'Abstracts',
                       'Abstract {} created'.format(abstract.verbose_title), session.user)
    return abstract
def create_contribution(event, contrib_data, custom_fields_data=None, session_block=None, extend_parent=False):
    """Create a new contribution in an event.

    :param event: The `Event` the contribution belongs to
    :param contrib_data: A dict with the contribution's data; a ``start_dt``
        key is extracted and used for scheduling instead of being populated
    :param custom_fields_data: A dict containing data for custom fields
    :param session_block: The session block to schedule the contribution in
    :param extend_parent: Whether the parent timetable entry may be extended
        to fit the contribution
    :return: The newly created `Contribution`
    """
    user = session.user if session else None
    requested_start = contrib_data.pop('start_dt', None)
    contrib = Contribution(event=event)
    contrib.populate_from_dict(contrib_data)
    if custom_fields_data:
        set_custom_fields(contrib, custom_fields_data)
    # flush so the contribution has an id before scheduling/signals use it
    db.session.flush()
    if requested_start is not None:
        schedule_contribution(contrib, start_dt=requested_start, session_block=session_block,
                              extend_parent=extend_parent)
    signals.event.contribution_created.send(contrib)
    logger.info('Contribution %s created by %s', contrib, user)
    contrib.log(EventLogRealm.management, EventLogKind.positive, 'Contributions',
                f'Contribution {contrib.verbose_title} has been created', user)
    # Note: If you ever add more stuff here that should run for any new contribution, make sure
    # to also add it to ContributionCloner.clone_single_contribution
    return contrib
def create_contribution(event, contrib_data, custom_fields_data=None, session_block=None, extend_parent=False):
    """Create a new contribution in an event (legacy ``event_new`` model).

    NOTE(review): this redefines ``create_contribution`` and appears to be an
    older revision of the function defined earlier in this file (it uses the
    ``event_new`` attribute and ``str.format``) — confirm whether both
    definitions are intentional.

    :param event: The `Event` the contribution belongs to
    :param contrib_data: A dict with the contribution's data; a ``start_dt``
        key is extracted and used for scheduling instead of being populated
    :param custom_fields_data: A dict containing data for custom fields
    :param session_block: The session block to schedule the contribution in
    :param extend_parent: Whether the parent timetable entry may be extended
        to fit the contribution
    :return: The newly created `Contribution`
    """
    requested_start = contrib_data.pop('start_dt', None)
    contrib = Contribution(event_new=event)
    contrib.populate_from_dict(contrib_data)
    if requested_start is not None:
        schedule_contribution(contrib, start_dt=requested_start, session_block=session_block,
                              extend_parent=extend_parent)
    if custom_fields_data:
        set_custom_fields(contrib, custom_fields_data)
    db.session.flush()
    signals.event.contribution_created.send(contrib)
    logger.info('Contribution %s created by %s', contrib, session.user)
    contrib.event_new.log(EventLogRealm.management, EventLogKind.positive, 'Contributions',
                          'Contribution "{}" has been created'.format(contrib.title), session.user)
    return contrib
def update_abstract(abstract, abstract_data, custom_fields_data=None):
    """Update an abstract and log the changes to the event log.

    :param abstract: The `Abstract` to update
    :param abstract_data: A dict containing the data to update; the keys
        ``submitted_for_tracks`` and ``attachments`` are handled separately
    :param custom_fields_data: A dict containing the data for custom fields
    """
    tracks = abstract_data.pop('submitted_for_tracks', None)
    attachments = abstract_data.pop('attachments', None)
    changes = {}
    if tracks is not None and abstract.edit_track_mode == EditTrackMode.both:
        changes.update(_update_tracks(abstract, tracks))
    if attachments:
        deleted_files = {f for f in abstract.files if f.id in attachments['deleted']}
        abstract.files = list(set(abstract.files) - deleted_files)
        delete_abstract_files(abstract, deleted_files)
        add_abstract_files(abstract, attachments['added'])
    changes.update(abstract.populate_from_dict(abstract_data))
    if custom_fields_data:
        changes.update(set_custom_fields(abstract, custom_fields_data))
    db.session.flush()
    logger.info('Abstract %s modified by %s', abstract, session.user)
    log_fields = {
        'title': 'Title',
        'description': 'Content',
        'submission_comment': 'Comment',
        'submitted_for_tracks': {
            'title': 'Tracks',
            'convert': lambda change: [sorted(t.title for t in x) for x in change]
        },
        'submitted_contrib_type': {
            'title': 'Contribution type',
            'type': 'string',
            'convert': lambda change: [t.name if t else None for t in change]
        }
    }
    # bug fix: `dict.iteritems()` does not exist on Python 3 (the file uses
    # py3-only f-strings elsewhere) - use `items()` instead
    for field_name, change in changes.items():
        # we skip None -> '' changes (editing an abstract that
        # did not have a value for a new field yet without filling
        # it out)
        # bug fix: the check must look at the individual `change` pair, not the
        # whole `changes` dict - `any(changes)` is truthy whenever any change
        # exists, so empty custom-field "changes" were never skipped
        if not field_name.startswith('custom_') or not any(change):
            continue
        field_id = int(field_name[7:])  # strip the 'custom_' prefix
        field = abstract.event.get_contribution_field(field_id)
        field_impl = field.field
        log_fields[field_name] = {
            'title': field.title,
            'type': field_impl.log_type,
            # `field_impl=field_impl` binds the current value as a default arg to
            # avoid the late-binding closure pitfall; a list comprehension replaces
            # `map()` which is a lazy iterator on Python 3
            'convert': lambda change, field_impl=field_impl: [field_impl.get_friendly_value(v) for v in change]
        }
    abstract.event.log(EventLogRealm.management, EventLogKind.change, 'Abstracts',
                       'Abstract {} modified'.format(abstract.verbose_title), session.user,
                       data={'Changes': make_diff_log(changes, log_fields)})
def update_abstract(abstract, abstract_data, custom_fields_data=None):
    """Update an abstract and log the changes to the event log.

    :param abstract: The `Abstract` to update
    :param abstract_data: A dict containing the data to update; the keys
        ``submitted_for_tracks`` and ``attachments`` are handled separately
    :param custom_fields_data: A dict containing the data for custom fields
    """
    tracks = abstract_data.pop('submitted_for_tracks', None)
    attachments = abstract_data.pop('attachments', None)
    changes = {}
    if tracks is not None and abstract.edit_track_mode == EditTrackMode.both:
        changes.update(_update_tracks(abstract, tracks))
    if attachments:
        deleted_files = {f for f in abstract.files if f.id in attachments['deleted']}
        abstract.files = list(set(abstract.files) - deleted_files)
        delete_abstract_files(abstract, deleted_files)
        add_abstract_files(abstract, attachments['added'])
    changes.update(abstract.populate_from_dict(abstract_data))
    if custom_fields_data:
        changes.update(set_custom_fields(abstract, custom_fields_data))
    db.session.flush()
    logger.info('Abstract %s modified by %s', abstract, session.user)
    log_fields = {
        'title': 'Title',
        'description': 'Content',
        'submission_comment': 'Comment',
        'submitted_for_tracks': {
            'title': 'Tracks',
            'convert': lambda change: [sorted(t.title for t in x) for x in change]
        },
        'submitted_contrib_type': {
            'title': 'Contribution type',
            'type': 'string',
            'convert': lambda change: [t.name if t else None for t in change]
        }
    }
    # bug fix: `dict.iteritems()` does not exist on Python 3 (the file uses
    # py3-only f-strings elsewhere) - use `items()` instead
    for field_name, change in changes.items():
        # we skip None -> '' changes (editing an abstract that
        # did not have a value for a new field yet without filling
        # it out)
        # bug fix: the check must look at the individual `change` pair, not the
        # whole `changes` dict - `any(changes)` is truthy whenever any change
        # exists, so empty custom-field "changes" were never skipped
        if not field_name.startswith('custom_') or not any(change):
            continue
        field_id = int(field_name[7:])  # strip the 'custom_' prefix
        field = abstract.event.get_contribution_field(field_id)
        field_impl = field.field
        log_fields[field_name] = {
            'title': field.title,
            'type': field_impl.log_type,
            # `field_impl=field_impl` binds the current value as a default arg to
            # avoid the late-binding closure pitfall; a list comprehension replaces
            # `map()` which is a lazy iterator on Python 3
            'convert': lambda change, field_impl=field_impl: [field_impl.get_friendly_value(v) for v in change]
        }
    abstract.event.log(EventLogRealm.reviewing, EventLogKind.change, 'Abstracts',
                       'Abstract {} modified'.format(abstract.verbose_title), session.user,
                       data={'Changes': make_diff_log(changes, log_fields)})
def update_contribution(contrib, contrib_data, custom_fields_data=None):
    """Update a contribution.

    :param contrib: The `Contribution` to update
    :param contrib_data: A dict containing the data to update
    :param custom_fields_data: A dict containing the data for custom fields.
    :return: A dict describing the update: ``unscheduled`` is true if the
        modification resulted in the contribution being unscheduled, in
        which case ``undo_unschedule`` contains the data needed to
        re-schedule it (undoing the session change that caused the
        unscheduling)
    """
    result = {'unscheduled': False, 'undo_unschedule': None}
    previous_block = contrib.session_block
    new_start_dt = contrib_data.pop('start_dt', None)
    if new_start_dt is not None:
        update_timetable_entry(contrib.timetable_entry, {'start_dt': new_start_dt})
    with track_location_changes():
        changes = contrib.populate_from_dict(contrib_data)
    if custom_fields_data:
        changes.update(set_custom_fields(contrib, custom_fields_data))
    if 'session' in contrib_data:
        entry = contrib.timetable_entry
        # a session change may force the contribution off the timetable;
        # remember enough state to undo that
        if entry is not None and _ensure_consistency(contrib):
            result['unscheduled'] = True
            result['undo_unschedule'] = {
                'start_dt': entry.start_dt.isoformat(),
                'contribution_id': contrib.id,
                'session_block_id': previous_block.id if previous_block else None,
                'force': True
            }
    db.session.flush()
    if changes:
        signals.event.contribution_updated.send(contrib, changes=changes)
    logger.info('Contribution %s updated by %s', contrib, session.user)
    contrib.log(EventLogRealm.management, EventLogKind.change, 'Contributions',
                f'Contribution "{contrib.title}" has been updated', session.user)
    return result
def update_contribution(contrib, contrib_data, custom_fields_data=None):
    """Update a contribution (legacy ``event_new`` model).

    NOTE(review): this redefines ``update_contribution`` and looks like an
    older revision of the function defined earlier in this file (it uses
    ``contrib.event_new`` and ``str.format``) — confirm whether both
    definitions are intentional.

    :param contrib: The `Contribution` to update
    :param contrib_data: A dict containing the data to update
    :param custom_fields_data: A dict containing the data for custom fields.
    :return: A dict describing the update: ``unscheduled`` is true if the
        modification resulted in the contribution being unscheduled, in
        which case ``undo_unschedule`` contains the data needed to
        re-schedule it (undoing the session change that caused the
        unscheduling)
    """
    result = {'unscheduled': False, 'undo_unschedule': None}
    previous_block = contrib.session_block
    new_start_dt = contrib_data.pop('start_dt', None)
    if new_start_dt is not None:
        update_timetable_entry(contrib.timetable_entry, {'start_dt': new_start_dt})
    changes = contrib.populate_from_dict(contrib_data)
    if custom_fields_data:
        changes.update(set_custom_fields(contrib, custom_fields_data))
    if 'session' in contrib_data:
        entry = contrib.timetable_entry
        # a session change may force the contribution off the timetable;
        # remember enough state to undo that
        if entry is not None and _ensure_consistency(contrib):
            result['unscheduled'] = True
            result['undo_unschedule'] = {
                'start_dt': entry.start_dt.isoformat(),
                'contribution_id': contrib.id,
                'session_block_id': previous_block.id if previous_block else None,
                'force': True
            }
    db.session.flush()
    if changes:
        signals.event.contribution_updated.send(contrib, changes=changes)
    logger.info('Contribution %s updated by %s', contrib, session.user)
    contrib.event_new.log(EventLogRealm.management, EventLogKind.change, 'Contributions',
                          'Contribution "{}" has been updated'.format(contrib.title), session.user)
    return result