def handle_event_updates(namespace_id, calendar_id, events, log, db_session):
    """Persists new or updated Event objects to the database."""
    num_added = 0
    num_updated = 0
    for remote_event in events:
        assert remote_event.uid is not None, 'Got remote item with null uid'
        # Note: we could bulk-load previously existing events instead of
        # loading them one-by-one. This would make the first sync faster, and
        # probably not really affect anything else.
        existing = db_session.query(Event).filter(
            Event.namespace_id == namespace_id,
            Event.calendar_id == calendar_id,
            Event.uid == remote_event.uid).first()
        if existing is None:
            # First time we see this uid: adopt the remote object as-is and
            # scope it to this namespace/calendar.
            remote_event.namespace_id = namespace_id
            remote_event.calendar_id = calendar_id
            db_session.add(remote_event)
            num_added += 1
        else:
            # Merge the remote data into the already-persisted row.
            existing.update(remote_event)
            num_updated += 1
        # Recurring events and overrides must be attached to their master
        # event; flush first so link_events can see the new/updated row.
        if isinstance(remote_event, (RecurringEvent, RecurringEventOverride)):
            db_session.flush()
            link_events(db_session, remote_event)
    log.info('synced added and updated events',
             calendar_id=calendar_id,
             added=num_added,
             updated=num_updated)
def handle_event_updates(namespace_id, calendar_id, events, log, db_session):
    """Persists new or updated Event objects to the database.

    Each remote event is matched against the local table by
    (namespace_id, calendar_id, uid) and either merged into the existing row
    or inserted as a new one. Recurring events and overrides are linked to
    their master event, and the session is committed after every tenth
    processed event.
    """
    added_count = 0
    updated_count = 0
    # Cheap EXISTS probe, evaluated once up front: on a first sync (no local
    # events for this calendar yet) the per-event lookup below is skipped.
    existing_event_query = db_session.query(Event).filter(
        Event.namespace_id == namespace_id,
        Event.calendar_id == calendar_id).exists()
    events_exist = db_session.query(existing_event_query).scalar()
    for event in events:
        assert event.uid is not None, 'Got remote item with null uid'
        local_event = None
        if events_exist:
            # Skip this lookup if there are no local events at all, for faster
            # first sync.
            # NOTE(review): events_exist is not refreshed after inserts, so a
            # uid repeated within this same first-sync batch would be added
            # twice — presumably remote feeds never repeat uids in one batch;
            # confirm.
            local_event = db_session.query(Event).filter(
                Event.namespace_id == namespace_id,
                Event.calendar_id == calendar_id,
                Event.uid == event.uid).first()
        if local_event is not None:
            # We also need to mark all overrides as cancelled if we're
            # cancelling a recurring event. However, note the original event
            # may not itself be recurring (recurrence may have been added).
            if isinstance(local_event, RecurringEvent) and \
                    event.status == 'cancelled' and \
                    local_event.status != 'cancelled':
                for override in local_event.overrides:
                    override.status = 'cancelled'
            local_event.update(event)
            # Replace the stored participant list wholesale so the ORM
            # registers the change as an attribute assignment.
            local_event.participants = event.participants
            updated_count += 1
        else:
            # New event: adopt the remote object and scope it to this
            # namespace/calendar before persisting.
            local_event = event
            local_event.namespace_id = namespace_id
            local_event.calendar_id = calendar_id
            db_session.add(local_event)
            added_count += 1

        # If we just updated/added a recurring event or override, make sure
        # we link it to the right master event.
        if isinstance(event, RecurringEvent) or \
                isinstance(event, RecurringEventOverride):
            db_session.flush()
            link_events(db_session, event)

        # Batch commits to avoid long transactions that may lock calendar rows.
        # NOTE(review): a trailing partial batch (< 10 events) is left
        # uncommitted here — presumably the caller commits; confirm.
        if (added_count + updated_count) % 10 == 0:
            db_session.commit()

    log.info('synced added and updated events',
             calendar_id=calendar_id,
             added=added_count, updated=updated_count)
def handle_event_updates(namespace_id, calendar_id, events, log, db_session):
    """Persists new or updated Event objects to the database."""
    num_added = 0
    num_updated = 0
    for event in events:
        assert event.uid is not None, 'Got remote item with null uid'
        # Note: we could bulk-load previously existing events instead of
        # loading them one-by-one. This would make the first sync faster, and
        # probably not really affect anything else.
        local_event = db_session.query(Event).filter(
            Event.namespace_id == namespace_id,
            Event.calendar_id == calendar_id,
            Event.uid == event.uid).first()
        if local_event is None:
            # Never seen this uid before: persist the remote object directly,
            # scoped to this namespace/calendar.
            local_event = event
            local_event.namespace_id = namespace_id
            local_event.calendar_id = calendar_id
            db_session.add(local_event)
            num_added += 1
        else:
            # Cancelling a recurring event cancels all of its overrides too.
            # We test the local event's type because the incoming remote
            # event may not itself be recurring (recurrence may have been
            # added).
            is_cancellation = (isinstance(local_event, RecurringEvent)
                               and event.status == 'cancelled'
                               and local_event.status != 'cancelled')
            if is_cancellation:
                for override in local_event.overrides:
                    override.status = 'cancelled'
            merged = local_event._partial_participants_merge(event)
            local_event.update(event)
            # MutableList does not register changes to nested elements, so
            # rebuild the participant list element-by-element.
            local_event.participants = []
            for participant in merged:
                local_event.participants.append(participant)
            num_updated += 1
        # Link recurring events/overrides to their master event; flush first
        # so the row is visible to link_events.
        if isinstance(event, (RecurringEvent, RecurringEventOverride)):
            db_session.flush()
            link_events(db_session, event)
    log.info('synced added and updated events',
             calendar_id=calendar_id,
             added=num_added, updated=num_updated)
def test_link_events_from_override(db, default_account, calendar):
    """Linking from an override attaches it to the master event that shares
    its UID and namespace_id, even though the two were created separately."""
    parent = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE)
    first_exdate = parse_exdate(parent)[0]
    override = Event(original_start_time=first_exdate,
                     master_event_uid=parent.uid,
                     namespace_id=parent.namespace_id,
                     source='local')
    # The polymorphic Event constructor is expected to yield an override here.
    assert isinstance(override, RecurringEventOverride)
    link_events(db.session, override)
    assert override.master == parent
def test_linking_events_from_different_calendars(db, default_account,
                                                 calendar, other_calendar):
    """Events sharing a UID across different calendars must not be linked.

    With the Google API a recurring event can appear in two calendars with
    the same UID, so master/override linking has to be scoped per calendar.
    Here the override lives in a different calendar than the master.
    """
    master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE)
    start = parse_exdate(master)[0]
    override = Event(original_start_time=start,
                     master_event_uid=master.uid,
                     namespace_id=master.namespace_id,
                     calendar_id=other_calendar.id,
                     uid='blah',
                     source='local')
    assert isinstance(override, RecurringEventOverride)
    link_events(db.session, override)
    assert override.master is None
def test_linking_events_from_different_calendars(db, default_account,
                                                 calendar, other_calendar):
    """Events sharing a UID across different calendars must not be linked.

    This is important because with the Google API, a recurring event can be
    in two calendars and have the same UID; here we create two different
    recurring events and check they stay unlinked.
    """
    master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE)
    original_start = parse_exdate(master)[0]
    override = Event(original_start_time=original_start,
                     master_event_uid=master.uid,
                     namespace_id=master.namespace_id,
                     calendar_id=other_calendar.id,
                     uid='blah',
                     source='local')
    assert isinstance(override, RecurringEventOverride)
    link_events(db.session, override)
    # Use identity, not equality: `== None` routes through __eq__ (which ORM
    # objects may override) and violates PEP 8; the sibling variant of this
    # test already uses `is None`.
    assert override.master is None
def handle_event_updates(namespace_id, calendar_id, events, log, db_session):
    """Persists new or updated Event objects to the database.

    Each remote event is looked up locally by (namespace_id, calendar_id,
    uid) and either merged into the existing row or added as a new one.
    Recurring events and overrides are linked to their master event.
    """
    added_count = 0
    updated_count = 0
    for event in events:
        assert event.uid is not None, 'Got remote item with null uid'
        # Note: we could bulk-load previously existing events instead of
        # loading them one-by-one. This would make the first sync faster, and
        # probably not really affect anything else.
        local_event = db_session.query(Event).filter(
            Event.namespace_id == namespace_id,
            Event.calendar_id == calendar_id,
            Event.uid == event.uid).first()
        if local_event is not None:
            # We also need to mark all overrides as cancelled if we're
            # cancelling a recurring event. Test the *local* event's type
            # here: the incoming remote event may not itself be recurring
            # (recurrence may have been added), so checking `event` would
            # miss cancellations of locally-recurring events.
            if isinstance(local_event, RecurringEvent) and \
                    event.status == 'cancelled' and \
                    local_event.status != 'cancelled':
                for override in local_event.overrides:
                    override.status = 'cancelled'
            local_event.update(event)
            updated_count += 1
        else:
            local_event = event
            local_event.namespace_id = namespace_id
            local_event.calendar_id = calendar_id
            db_session.add(local_event)
            added_count += 1

        # If we just updated/added a recurring event or override, make sure
        # we link it to the right master event.
        if isinstance(event, RecurringEvent) or \
                isinstance(event, RecurringEventOverride):
            db_session.flush()
            link_events(db_session, event)

    log.info('synced added and updated events',
             calendar_id=calendar_id,
             added=added_count, updated=updated_count)
def test_link_events_from_master(db, default_account, calendar):
    """Linking from the master event picks up an override created
    separately, matching on UID and namespace_id."""
    master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE)
    exdate = parse_exdate(master)[0]
    override = recurring_override_instance(db, master, exdate,
                                           master.start, master.end)
    assert isinstance(master, RecurringEvent)
    linked = link_events(db.session, master)
    assert len(linked) == 1
    assert override in master.overrides
    assert any(o.uid == override.uid for o in master.overrides)
def test_link_events_from_master_diff_calendars(db, default_account,
                                                calendar, other_calendar):
    """Same as test_link_events_from_master, but checks that linking does
    not work across calendars (see
    test_linking_events_from_different_calendars for why that matters)."""
    master = recurring_event(db, default_account, calendar, TEST_EXDATE_RULE)
    exdate = parse_exdate(master)[0]
    override = recurring_override_instance(db, master, exdate,
                                           master.start, master.end)
    # Move the override to another calendar: it must not be linked back.
    override.calendar = other_calendar
    assert isinstance(master, RecurringEvent)
    linked = link_events(db.session, master)
    assert len(linked) == 0
def populate():
    # Populate new classes from the existing data.
    #
    # One-off data migration (Python 2): converts existing Event rows into
    # the new RecurringEvent / RecurringEventOverride subclasses, re-expands
    # truncated recurrence rules from raw_data, extracts override metadata
    # (master uid, original start time, cancellation), and links overrides
    # to their master events. Exits with status 2 on SQL insert failure.
    from inbox.models.event import (Event, RecurringEvent,
                                    RecurringEventOverride)
    from inbox.models.session import session_scope
    from inbox.events.util import parse_datetime
    from inbox.events.recurring import link_events

    with session_scope() as db:
        # Redo recurrence rule population, since we extended the column length
        print "Repopulating max-length recurrences...",
        for e in db.query(Event).filter(
                sa.func.length(Event.recurrence) > 250):
            # raw_data may be json.dumps output or str(dict); try JSON first,
            # then fall back to a Python-literal parse.
            # NOTE(review): bare except: swallows all errors (including
            # KeyboardInterrupt) — acceptable for a one-off script, but risky.
            try:
                raw_data = json.loads(e.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(e.raw_data)
                except:
                    print "Could not load raw data for event {}".format(e.id)
                    continue
            e.recurrence = raw_data['recurrence']
        db.commit()
        print "done."

        print "Updating types for Override...",
        # Slightly hacky way to convert types (only needed for one-off import)
        convert = """UPDATE event SET type='recurringeventoverride' WHERE
                     raw_data LIKE '%recurringEventId%'"""
        db.execute(convert)
        # Create joined-table-inheritance rows for the newly retyped events.
        create = """INSERT INTO recurringeventoverride (id)
                    SELECT id FROM event WHERE type='recurringeventoverride'
                    AND id NOT IN
                    (SELECT id FROM recurringeventoverride)"""
        try:
            db.execute(create)
        except Exception as e:
            print "Couldn't insert RecurringEventOverrides: {}".format(e)
            exit(2)
        print "done."

        c = 0
        print "Expanding Overrides .",
        query = db.query(RecurringEventOverride)
        for e in query:
            try:
                # Some raw data is str(dict), other is json.dumps
                raw_data = json.loads(e.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(e.raw_data)
                except:
                    print "Could not load raw data for event {}".format(e.id)
                    continue
            rec_uid = raw_data.get('recurringEventId')
            if rec_uid:
                e.master_event_uid = rec_uid
            ost = raw_data.get('originalStartTime')
            if ost:
                # this is a dictionary with one value
                start_time = ost.values().pop()
                e.original_start_time = parse_datetime(start_time)
            # attempt to get the ID for the event, if we can, and
            # set the relationship appropriately
            if raw_data.get('status') == 'cancelled':
                e.cancelled = True
            link_events(db, e)
            c += 1
            # Progress dots every 100 records, flushed so they show up live.
            if c % 100 == 0:
                print ".",
                sys.stdout.flush()
        db.commit()
        print "done. ({} modified)".format(c)

        # Convert Event to RecurringEvent
        print "Updating types for RecurringEvent...",
        convert = """UPDATE event SET type='recurringevent' WHERE
                     recurrence IS NOT NULL"""
        db.execute(convert)
        create = """INSERT INTO recurringevent (id)
                    SELECT id FROM event WHERE type='recurringevent'
                    AND id NOT IN
                    (SELECT id FROM recurringevent)"""
        try:
            db.execute(create)
        except Exception as e:
            print "Couldn't insert RecurringEvents: {}".format(e)
            exit(2)
        print "done."

        # Pull out recurrence metadata from recurrence
        c = 0
        print "Expanding master events .",
        query = db.query(RecurringEvent)
        for r in query:
            # Expand the stored recurrence column into the new rrule fields.
            r.unwrap_rrule()
            try:
                raw_data = json.loads(r.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(r.raw_data)
                except:
                    print "Could not load raw data for event {}".format(r.id)
                    continue
            r.start_timezone = raw_data['start'].get('timeZone')
            # find any un-found overrides that didn't have masters earlier
            link_events(db, r)
            db.add(r)
            c += 1
            if c % 100 == 0:
                print ".",
                sys.stdout.flush()
        db.commit()
        print "done. ({} modified)".format(c)

        # Finally, convert all remaining Events to type='event'
        convert = """UPDATE event SET type='event' WHERE type IS NULL"""
        db.execute(convert)