def create(self, docs, **kwargs):
    events_service = get_resource_service('events')
    history_service = get_resource_service('events_history')
    parent_id = request.view_args['item_id']
    parent_event = events_service.find_one(req=None, _id=parent_id)

    new_event_ids = []
    for doc in docs:
        # Duplicate the parent event, store the copy and record the action in history
        new_event = self._duplicate_doc(parent_event)
        new_ids = events_service.post([new_event])
        doc.update(new_event)
        history_service.on_item_created([new_event])
        new_event_ids = new_ids

        for new_id in new_ids:
            history_service.on_item_updated({'duplicate_id': new_id}, parent_event, 'duplicate')
            history_service.on_item_updated({'duplicate_id': parent_id}, new_event, 'duplicate_from')

    # Link the new events back to the parent via its 'duplicate_to' list
    duplicate_ids = parent_event.get('duplicate_to', [])
    duplicate_ids.extend(new_event_ids)

    events_service.patch(parent_id, {'duplicate_to': duplicate_ids})
    app.on_updated_events({'duplicate_to': duplicate_ids}, {'_id': parent_id})

    return new_event_ids

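# A minimal sketch of the _duplicate_doc helper used by create() above, assuming it
# simply copies the parent event and strips identifier/lock/link fields so the copy is
# posted as a fresh draft. The exact field list and the 'draft' state are assumptions;
# the real helper is defined elsewhere on this service.
def _duplicate_doc_sketch(parent_event):
    from copy import deepcopy

    new_doc = deepcopy(parent_event)
    # Drop identifiers, locks and duplicate links so the copy is saved as a new item
    for field in ('_id', 'guid', 'unique_id', 'unique_name',
                  'lock_user', 'lock_time', 'lock_session', 'lock_action',
                  'duplicate_to', 'duplicate_from'):
        new_doc.pop(field, None)
    new_doc['state'] = 'draft'
    return new_doc
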
def on_created(self, docs):
    """Send WebSocket Notifications for created Events

    Generate the list of IDs for recurring and non-recurring events,
    then send this list off to the clients so they can fetch these events.
    """
    notifications_sent = []
    history_service = get_resource_service('events_history')

    for doc in docs:
        event_id = str(doc.get(config.ID_FIELD))

        # If we duplicated this event, update the history
        if doc.get('duplicate_from'):
            parent_id = doc['duplicate_from']
            parent_event = self.find_one(req=None, _id=parent_id)

            history_service.on_item_updated({'duplicate_id': event_id}, parent_event, 'duplicate')
            history_service.on_item_updated({'duplicate_id': parent_id}, doc, 'duplicate_from')

            duplicate_ids = parent_event.get('duplicate_to', [])
            duplicate_ids.append(event_id)

            self.patch(parent_id, {'duplicate_to': duplicate_ids})
            app.on_updated_events({'duplicate_to': duplicate_ids}, {'_id': parent_id})

        event_type = 'events:created'
        user_id = str(doc.get('original_creator', ''))

        if doc.get('recurrence_id'):
            event_type = 'events:created:recurring'
            event_id = str(doc['recurrence_id'])

        # Don't send a notification if one has already been sent,
        # so recurring events don't trigger multiple notifications
        if event_id in notifications_sent or 'previous_recurrence_id' in doc:
            continue

        notifications_sent.append(event_id)
        push_notification(event_type, item=event_id, user=user_id)

def _update_metadata_recurring(self, updates, original, update_method):
    """Update the Metadata for a series of recurring events

    Based on the update_method, it will update:
        single: the provided event only
        future: the provided event, and all future events
        all: all events in the series
    """
    events = []
    if update_method == UPDATE_FUTURE:
        historic, past, future = self.get_recurring_timeline(original)
        events.extend(future)
    elif update_method == UPDATE_ALL:
        historic, past, future = self.get_recurring_timeline(original)
        events.extend(historic)
        events.extend(past)
        events.extend(future)

    for e in events:
        self.patch(e[config.ID_FIELD], updates)
        app.on_updated_events(updates, {'_id': e[config.ID_FIELD]})

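# _update_metadata_recurring relies on self.get_recurring_timeline(original), defined
# elsewhere on the service. This is a hedged sketch of the three-way split it appears to
# return, assuming every event in a series shares original['recurrence_id'] and that
# "future" starts at the provided event itself (as the docstring above states). The
# _find_series_sketch lookup and the exact boundary conditions are assumptions.
def get_recurring_timeline_sketch(self, selected):
    from superdesk.utc import utcnow

    series = sorted(
        self._find_series_sketch(selected['recurrence_id']),  # hypothetical lookup helper
        key=lambda event: event['dates']['start']
    )
    now = utcnow()
    selected_start = selected['dates']['start']

    # historic: already occurred; past: still upcoming but earlier than the selected
    # event; future: the selected event and everything scheduled after it
    historic = [e for e in series if e['dates']['start'] < now and e['dates']['start'] < selected_start]
    past = [e for e in series if now <= e['dates']['start'] < selected_start]
    future = [e for e in series if e['dates']['start'] >= selected_start]
    return historic, past, future
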
def _update_recurring_events(self, updates, original, update_method):
    """Method to update recurring events.

    If the recurring_rule has been removed for this event, process it separately,
    otherwise update the event and/or its recurring rules
    """
    # This method now only handles updating of Event metadata
    # So make sure to remove any date information that might be in
    # the updates
    updates.pop('dates', None)

    if update_method == UPDATE_FUTURE:
        historic, past, future = self.get_recurring_timeline(original)
        events = future
    else:
        historic, past, future = self.get_recurring_timeline(original)
        events = historic + past + future

    events_post_service = get_resource_service('events_post')

    # First we want to validate that all events can be posted
    for e in events:
        if post_required(updates, e):
            merged = deepcopy(e)
            merged.update(updates)
            events_post_service.validate_item(merged)

    for e in events:
        event_id = e[config.ID_FIELD]
        new_updates = deepcopy(updates)
        new_updates['skip_on_update'] = True
        new_updates[config.ID_FIELD] = event_id
        self.patch(event_id, new_updates)
        app.on_updated_events(new_updates, {'_id': event_id})

    # And finally push a notification to connected clients
    push_notification(
        'events:updated:recurring',
        item=str(original[config.ID_FIELD]),
        recurrence_id=str(original['recurrence_id']),
        user=str(updates.get('version_creator', ''))
    )

def _update_recurring_events(self, updates, original, update_method):
    """Method to update recurring events.

    If the recurring_rule has been removed for this event, process it separately,
    otherwise update the event and/or its recurring rules
    """
    # This method now only handles updating of Event metadata
    # So make sure to remove any date information that might be in
    # the updates
    updates.pop('dates', None)

    if update_method == UPDATE_FUTURE:
        historic, past, future = self.get_recurring_timeline(original)
        events = future
    else:
        historic, past, future = self.get_recurring_timeline(original)
        events = historic + past + future

    events_post_service = get_resource_service('events_post')

    # First we want to validate that all events can be posted
    for e in events:
        if post_required(updates, e):
            merged = deepcopy(e)
            merged.update(updates)
            events_post_service.validate_item(merged)

    # If this update is from the assignToCalendar action
    # then we only want to update the calendars of each Event
    only_calendars = original.get('lock_action') == 'assign_calendar'
    original_calendar_qcodes = [
        calendar['qcode'] for calendar in original.get('calendars') or []
    ]

    # Get the list of calendars added
    updated_calendars = [
        calendar for calendar in updates.get('calendars') or []
        if calendar['qcode'] not in original_calendar_qcodes
    ]

    mark_completed = original.get('lock_action') == 'mark_completed' and updates.get('actioned_date')
    mark_complete_validated = False

    for e in events:
        event_id = e[config.ID_FIELD]
        new_updates = deepcopy(updates)
        new_updates['skip_on_update'] = True
        new_updates[config.ID_FIELD] = event_id

        if only_calendars:
            # Get the original for this item, and add new calendars to it
            # Skipping calendars already assigned to this item
            original_event = self.find_one(req=None, _id=event_id)
            original_qcodes = [
                calendar['qcode'] for calendar in original_event.get('calendars') or []
            ]

            new_updates['calendars'] = deepcopy(original_event.get('calendars') or [])
            new_updates['calendars'].extend([
                calendar for calendar in updated_calendars
                if calendar['qcode'] not in original_qcodes
            ])
        elif mark_completed:
            self.mark_event_complete(original, updates, e, mark_complete_validated)
            # It is validated if the previous function did not raise an error
            mark_complete_validated = True

        self.patch(event_id, new_updates)
        app.on_updated_events(new_updates, {'_id': event_id})

    # And finally push a notification to connected clients
    push_notification(
        'events:updated:recurring',
        item=str(original[config.ID_FIELD]),
        recurrence_id=str(original['recurrence_id']),
        user=str(updates.get('version_creator', ''))
    )

def _patch_event_in_recurrent_series(self, event_id, updated_event):
    # Patch a single event in a recurring series, flagging the payload with
    # 'skip_on_update' before saving and firing the update signal
    updated_event['skip_on_update'] = True
    self.patch(event_id, updated_event)
    app.on_updated_events(updated_event, {'_id': event_id})

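# A short usage sketch for the helper above, following the same per-event pattern the
# series-update methods in this file use: copy the shared updates dict for each event
# before patching so one event's changes don't leak into the next. 'series_events' and
# 'updates' are assumed inputs and this wrapper is illustrative only.
def _patch_whole_series_sketch(self, series_events, updates):
    from copy import deepcopy

    for series_event in series_events:
        per_event_updates = deepcopy(updates)
        self._patch_event_in_recurrent_series(series_event[config.ID_FIELD], per_event_updates)
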
def _update_recurring_events(self, updates, original):
    """Method to update recurring events.

    If the recurring_rule has been removed for this event, process it separately,
    otherwise update the event and/or its recurring rules
    """
    merged = copy.deepcopy(original)
    merged.update(updates)

    if updates.get('dates'):
        if not updates['dates'].get('recurring_rule', None):
            # Recurring rule has been removed for this event,
            # Remove this rule and return from this method
            self._remove_recurring_rules(updates, original)

            push_notification(
                'events:updated',
                item=str(original[config.ID_FIELD]),
                user=str(updates.get('version_creator'))
            )
            return

        # Generate the list of changes to the series of events based on the
        # new recurring_rules
        new_events, updated_events, deleted_events = self._update_recurring_rules(updates, original)
    else:
        # We only update events here, so get the list of future events
        updated_events = self._get_future_events(merged)
        new_events = deleted_events = []

    # Create instances for the new events, and save them to mongo/elastic
    # Then fire off events for them
    added_events = []
    for e in new_events:
        event = copy.deepcopy(merged)
        event['dates']['start'] = e['dates']['start']
        event['dates']['end'] = e['dates']['end']
        event['_id'] = event['guid'] = generate_guid(type=GUID_NEWSML)
        added_events.append(event)

    if added_events:
        self.create(added_events)
        app.on_inserted_events(added_events)

    # For all the deleted events, remove them from mongo/elastic
    # Then fire off events for them
    for e in deleted_events:
        self.delete({'_id': e[config.ID_FIELD]})
        app.on_deleted_item_events(e)

    # For all the updated events, update their dates/metadata
    # Then fire off events for them
    for e in updated_events:
        if e[config.ID_FIELD] == original[config.ID_FIELD]:
            continue

        updated_event = copy.deepcopy(updates)

        if 'dates' not in updated_event:
            updated_event['dates'] = original['dates']

        if 'guid' in updated_event:
            del updated_event['guid']

        updated_event['dates']['start'] = e['dates']['start']
        updated_event['dates']['end'] = e['dates']['end']
        updated_event['skip_on_update'] = True

        self.patch(e[config.ID_FIELD], updated_event)
        app.on_updated_events(updated_event, {'_id': e[config.ID_FIELD]})

    # And finally push a notification to connected clients
    push_notification(
        'events:updated:recurring',
        item=str(original[config.ID_FIELD]),
        recurrence_id=str(original['recurrence_id']),
        user=str(updates.get('version_creator', ''))
    )