def test_icloud_cancelled_event(db, default_account):
    """Importing an iCloud cancellation ICS flips the event to 'cancelled'."""
    uid = "5919D444-7C99-4687-A526-FC5D10091318"
    # First fixture creates the event; the second one cancels it.
    for fixture, expected_status in [('icloud_cancelled1.ics', 'confirmed'),
                                     ('icloud_cancelled2.ics', 'cancelled')]:
        with open(absolute_path(FIXTURES + fixture)) as fd:
            calendar_data = fd.read()
        message = add_fake_msg_with_calendar_part(
            db.session, default_account, calendar_data)
        import_attached_events(db.session, default_account, message)
        db.session.commit()

        event = db.session.query(Event).filter(Event.uid == uid).one()
        assert event.status == expected_status
def test_event_update(db, default_account, message):
    """A newer ICS for the same UID overwrites the stored event's location."""
    add_fake_calendar(db.session, default_account.namespace.id,
                      name="Emailed events", read_only=True)

    # v1 sets the original venue; v2 moves the event to a new one.
    expectations = [
        ('gcal_v1.ics', "Olympia Hall, 28 Boulevard des Capucines, "
                        "75009 Paris, France"),
        ('gcal_v2.ics', u"Le Zenith, 211 Avenue Jean Jaures, "
                        "75019 Paris, France"),
    ]
    for fixture, expected_location in expectations:
        with open(absolute_path(FIXTURES + fixture)) as fd:
            calendar_data = fd.read()
        msg = add_fake_msg_with_calendar_part(
            db.session, default_account, calendar_data)
        import_attached_events(db.session, default_account, msg)
        db.session.commit()

        event = db.session.query(Event).filter(
            Event.uid == "*****@*****.**").one()
        assert event.location == expected_location
def test_icalendar_import(db, generic_account, message):
    """An attached invite is auto-imported with all participants on 'noreply'."""
    add_fake_calendar(db.session, generic_account.namespace.id,
                      name="Emailed events", read_only=True)

    with open(absolute_path(FIXTURES + "invite_w_rsvps1.ics")) as fd:
        calendar_data = fd.read()
    msg = add_fake_msg_with_calendar_part(db.session, generic_account,
                                          calendar_data)
    import_attached_events(db.session, generic_account, msg)

    uid = ("040000008200E00074C5B7101A82E00800000000"
           "F9125A30B06BD001000000000000000010000000"
           "9D791C7548BFD144BFA54F14213CAD25")
    event = db.session.query(Event).filter(Event.uid == uid).one()
    assert len(event.participants) == 2
    # Nobody has answered the invite yet.
    assert all(p["status"] == "noreply" for p in event.participants)
def test_cancelled_event(db, default_account):
    """Importing a Google cancellation ICS flips the event to 'cancelled'."""
    # First fixture creates the event; the second one cancels it.
    for fixture, expected_status in [('google_cancelled1.ics', 'confirmed'),
                                     ('google_cancelled2.ics', 'cancelled')]:
        with open(absolute_path(FIXTURES + fixture)) as fd:
            calendar_data = fd.read()
        message = add_fake_msg_with_calendar_part(
            db.session, default_account, calendar_data)
        import_attached_events(db.session, default_account, message)
        db.session.commit()

        event = db.session.query(Event).filter(
            Event.uid == "*****@*****.**").one()
        assert event.status == expected_status
def create_message(self, db_session, acct, folder, msg): assert acct is not None and acct.namespace is not None # Check if we somehow already saved the imapuid (shouldn't happen, but # possible due to race condition). If so, don't commit changes. existing_imapuid = db_session.query(ImapUid).filter( ImapUid.account_id == acct.id, ImapUid.folder_id == folder.id, ImapUid.msg_uid == msg.uid).first() if existing_imapuid is not None: log.error('Expected to create imapuid, but existing row found', remote_msg_uid=msg.uid, existing_imapuid=existing_imapuid.id) return None new_uid = common.create_imap_message(db_session, log, acct, folder, msg) new_uid = self.add_message_attrs(db_session, new_uid, msg) # We're calling import_attached_events here instead of some more # obvious place (like Message.create_from_synced) because the function # requires new_uid.message to have been flushed. # This is necessary because the import_attached_events does db lookups. if new_uid.message.has_attached_events: with db_session.no_autoflush: import_attached_events(db_session, acct, new_uid.message) return new_uid
def test_self_sent_update(db, default_account, message):
    """Updates to a self-sent invite must not touch copies of the event
    living outside the auto-import ("Emailed events") calendar."""
    # Create the calendars
    add_fake_calendar(db.session, default_account.namespace.id,
                      name="Emailed events", read_only=True)
    default_calendar = add_fake_calendar(db.session,
                                         default_account.namespace.id,
                                         name="Calendar", read_only=False)

    # Import the self-sent event.
    with open(absolute_path(FIXTURES + 'self_sent_v1.ics')) as fd:
        ics_data = fd.read()
    msg = add_fake_msg_with_calendar_part(db.session, default_account,
                                          ics_data)
    # Mark the message as sent by the account owner itself.
    msg.from_addr = [(default_account.name, default_account.email_address)]
    import_attached_events(db.session, default_account, msg)
    db.session.commit()
    evs = db.session.query(Event).filter(
        Event.uid == "*****@*****.**").all()
    assert len(evs) == 1
    ev = evs[0]
    assert ev.location == ("Olympia Hall, 28 Boulevard des Capucines, "
                           "75009 Paris, France")

    # Create a copy of the event, and store it in the default calendar.
    event_copy = Event()
    event_copy.update(ev)
    event_copy.calendar = default_calendar
    db.session.add(event_copy)
    db.session.commit()

    # Import the v2 update (same UID, new location).
    with open(absolute_path(FIXTURES + 'self_sent_v2.ics')) as fd:
        ics_data = fd.read()
    msg = add_fake_msg_with_calendar_part(
        db.session, default_account, ics_data)
    import_attached_events(db.session, default_account, msg)
    db.session.commit()
    evs = db.session.query(Event).filter(
        Event.uid == "*****@*****.**").all()

    # Check that the event in the default calendar didn't get updated.
    assert len(evs) == 2
    for ev in evs:
        db.session.refresh(ev)
        if ev.calendar_id == default_calendar.id:
            # The manual copy keeps the original venue.
            assert ev.location == ("Olympia Hall, 28 Boulevard des "
                                   "Capucines, "
                                   "75009 Paris, France")
        else:
            # The auto-imported event picks up the v2 venue.
            assert ev.location == (u"Le Zenith, 211 Avenue Jean Jaures, "
                                   "75019 Paris, France")
def test_invalid_rsvp(db, default_account):
    """An RSVP reply carrying an invalid id must not create any event."""
    with open(absolute_path(FIXTURES + "invalid_rsvp.ics")) as fd:
        rsvp_data = fd.read()

    message = add_fake_msg_with_calendar_part(db.session, default_account,
                                              rsvp_data)
    import_attached_events(db.session, default_account, message)
    db.session.commit()

    matches = db.session.query(Event).filter(
        Event.uid == "[email protected]").all()
    assert len(matches) == 0
def test_recurring_ical(db, default_account):
    """A recurring invite imports as a RecurringEvent with timezone info."""
    with open(absolute_path(FIXTURES + "gcal_recur.ics")) as fd:
        recurring_data = fd.read()

    message = add_fake_msg_with_calendar_part(db.session, default_account,
                                              recurring_data)
    import_attached_events(db.session, default_account, message)
    db.session.commit()

    event = db.session.query(Event).filter(
        Event.uid == "*****@*****.**").one()
    assert isinstance(event, RecurringEvent)
    assert isinstance(event.recurring, list)
    assert event.start_timezone == "America/Los_Angeles"
def test_truncate_bogus_sequence_numbers(db, default_account):
    """ICS SEQUENCE values that overflow a signed 32-bit int get clamped.

    The fixture carries an out-of-range sequence number; after import the
    stored event must hold the largest storable value (2**31 - 1).
    """
    with open(absolute_path(FIXTURES + "bogus_sequence_number.ics")) as fd:
        data = fd.read()
    msg = add_fake_msg_with_calendar_part(db.session, default_account, data)
    import_attached_events(db.session, default_account, msg)
    db.session.commit()

    ev = db.session.query(Event).filter(
        Event.uid == "*****@*****.**").one()
    # Check that the sequence number got truncated to the biggest possible
    # number. Plain int literal instead of the Python-2-only `2147483647L`:
    # it compares equal under Python 2 (int == long) and also parses under
    # Python 3, where the `L` suffix is a syntax error.
    assert ev.sequence_number == 2147483647
def test_rsvp_for_other_provider(db, default_account):
    """RSVP replies that don't answer a Nylas invite must be discarded."""
    with open(absolute_path(FIXTURES + "invalid_rsvp2.ics")) as fd:
        rsvp_data = fd.read()

    message = add_fake_msg_with_calendar_part(db.session, default_account,
                                              rsvp_data)
    import_attached_events(db.session, default_account, message)
    db.session.commit()

    matches = db.session.query(Event).filter(
        Event.uid == "*****@*****.**").all()
    assert len(matches) == 0
def create_message(self, db_session, acct, folder, msg): assert acct is not None and acct.namespace is not None # Check if we somehow already saved the imapuid (shouldn't happen, but # possible due to race condition). If so, don't commit changes. existing_imapuid = db_session.query(ImapUid).filter( ImapUid.account_id == acct.id, ImapUid.folder_id == folder.id, ImapUid.msg_uid == msg.uid).first() if existing_imapuid is not None: log.error('Expected to create imapuid, but existing row found', remote_msg_uid=msg.uid, existing_imapuid=existing_imapuid.id) return None # Check if the message is valid. # https://sentry.nylas.com/sentry/sync-prod/group/3387/ if msg.body is None: log.warning('Server returned a message with an empty body.') return None new_uid = common.create_imap_message(db_session, acct, folder, msg) self.add_message_to_thread(db_session, new_uid.message, msg) db_session.flush() # We're calling import_attached_events here instead of some more # obvious place (like Message.create_from_synced) because the function # requires new_uid.message to have been flushed. # This is necessary because the import_attached_events does db lookups. if new_uid.message.has_attached_events: with db_session.no_autoflush: import_attached_events(db_session, acct, new_uid.message) # If we're in the polling state, then we want to report the metric # for latency when the message was received vs created if self.state == 'poll': latency_millis = ( datetime.utcnow() - new_uid.message.received_date) \ .total_seconds() * 1000 metrics = [ '.'.join(['accounts', 'overall', 'message_latency']), '.'.join(['providers', self.provider_name, 'message_latency']), ] for metric in metrics: statsd_client.timing(metric, latency_millis) return new_uid
def test_invalid_sender(from_value, db, default_account):
    """Messages whose from field is invalid must not produce any event."""
    events_before = db.session.query(Event).count()

    with open(absolute_path(FIXTURES + 'iphone_through_exchange.ics')) as fd:
        ics_payload = fd.read()
    msg = add_fake_msg_with_calendar_part(db.session, default_account,
                                          ics_payload)
    # Override the sender with the (invalid) parametrized value.
    msg.from_addr = from_value
    db.session.add(msg)
    db.session.commit()

    import_attached_events(db.session, default_account, msg)
    db.session.commit()

    events_after = db.session.query(Event).count()
    assert events_before == events_after, \
        "The event shouldn't have been added"
def create_message(self, db_session, acct, folder, msg): assert acct is not None and acct.namespace is not None # Check if we somehow already saved the imapuid (shouldn't happen, but # possible due to race condition). If so, don't commit changes. existing_imapuid = ( db_session.query(ImapUid) .filter(ImapUid.account_id == acct.id, ImapUid.folder_id == folder.id, ImapUid.msg_uid == msg.uid) .first() ) if existing_imapuid is not None: log.error( "Expected to create imapuid, but existing row found", remote_msg_uid=msg.uid, existing_imapuid=existing_imapuid.id, ) return None new_uid = common.create_imap_message(db_session, acct, folder, msg) self.add_message_to_thread(db_session, new_uid.message, msg) db_session.flush() # We're calling import_attached_events here instead of some more # obvious place (like Message.create_from_synced) because the function # requires new_uid.message to have been flushed. # This is necessary because the import_attached_events does db lookups. if new_uid.message.has_attached_events: with db_session.no_autoflush: import_attached_events(db_session, acct, new_uid.message) # If we're in the polling state, then we want to report the metric # for latency when the message was received vs created if self.state == "poll": latency_millis = (datetime.utcnow() - new_uid.message.received_date).total_seconds() * 1000 metrics = [ ".".join(["accounts", "overall", "message_latency"]), ".".join(["accounts", str(acct.id), "message_latency"]), ".".join(["providers", self.provider_name, "message_latency"]), ] for metric in metrics: statsd_client.timing(metric, latency_millis) return new_uid
def test_participant_merging(db, default_account, message):
    """Successive RSVP ICS files merge into the stored event's participants."""
    uid = ("040000008200E00074C5B7101A82E00800000000"
           "F9125A30B06BD001000000000000000010000000"
           "9D791C7548BFD144BFA54F14213CAD25")

    def import_fixture(filename):
        # Import one ICS fixture and return the matching stored event.
        with open(absolute_path(FIXTURES + filename)) as fd:
            payload = fd.read()
        fake_msg = add_fake_msg_with_calendar_part(
            db.session, default_account, payload)
        import_attached_events(db.session, default_account, fake_msg)
        db.session.commit()
        return db.session.query(Event).filter(Event.uid == uid).one()

    add_fake_calendar(db.session, default_account.namespace.id,
                      name="Emailed events", read_only=True)

    # Initial invite: nobody has answered yet.
    event = import_fixture('invite_w_rsvps1.ics')
    assert len(event.participants) == 2
    for participant in event.participants:
        assert participant['status'] == 'noreply'

    # First RSVP: one attendee answers "maybe"; the other is untouched.
    event = import_fixture('invite_w_rsvps2.ics')
    assert len(event.participants) == 2
    for participant in event.participants:
        if participant['email'] == '*****@*****.**':
            assert participant['status'] == 'maybe'
            assert participant['name'] == 'Inbox Apptest'
        elif participant['email'] == '*****@*****.**':
            assert participant['status'] == 'noreply'

    # Second RSVP: the other attendee answers "yes"; the first keeps "maybe".
    event = import_fixture('invite_w_rsvps3.ics')
    assert len(event.participants) == 2
    for participant in event.participants:
        if participant['email'] == '*****@*****.**':
            assert participant['status'] == 'maybe'
            assert participant['name'] == 'Inbox Apptest'
        elif participant['email'] == '*****@*****.**':
            assert participant['name'] == 'Karim Hamidou'
            assert participant['status'] == 'yes'
def test_rsvp_merging(db, generic_account, message):
    """RSVPs to invites we sent get merged; stale sequence numbers are
    discarded.

    Fix: the final step used to import msg3 again instead of msg4, so the
    stale-sequence RSVP fixture under test was never actually imported.
    """
    # This test checks that RSVPs to invites we sent get merged.
    # It does some funky stuff around calendars because by default
    # autoimported invites end up in the "emailed events" calendar.
    # However, we're simulating invite sending, which supposes using
    # an event from another calendar.
    add_fake_calendar(db.session, generic_account.namespace.id,
                      name="Emailed events", read_only=True)
    cal2 = add_fake_calendar(db.session, generic_account.namespace.id,
                             name="Random calendar", read_only=True)

    uid = ("040000008200E00074C5B7101A82E00800000000"
           "F9125A30B06BD001000000000000000010000000"
           "9D791C7548BFD144BFA54F14213CAD25")

    # Initial invite: nobody has answered yet.
    with open(absolute_path(FIXTURES + 'invite_w_rsvps1.ics')) as fd:
        ics_data = fd.read()
    msg = add_fake_msg_with_calendar_part(
        db.session, generic_account, ics_data)
    import_attached_events(db.session, generic_account, msg)

    ev = db.session.query(Event).filter(Event.uid == uid).one()
    assert len(ev.participants) == 2
    for participant in ev.participants:
        assert participant['status'] == 'noreply'

    # Simulate an invite we sent: move the event off the auto-import
    # calendar.
    ev.public_id = "cccc"
    ev.calendar = cal2

    # First RSVP: one attendee answers "maybe".
    with open(absolute_path(FIXTURES + 'invite_w_rsvps2.ics')) as fd:
        ics_data = fd.read()
    msg2 = add_fake_msg_with_calendar_part(
        db.session, generic_account, ics_data)
    import_attached_events(db.session, generic_account, msg2)

    ev = db.session.query(Event).filter(Event.uid == uid).one()
    assert len(ev.participants) == 2
    for participant in ev.participants:
        if participant['email'] == '*****@*****.**':
            assert participant['status'] == 'maybe'
            assert participant['name'] == 'Inbox Apptest'
        elif participant['email'] == '*****@*****.**':
            assert participant['status'] == 'noreply'

    # Second RSVP: the other attendee answers "yes".
    with open(absolute_path(FIXTURES + 'invite_w_rsvps3.ics')) as fd:
        ics_data = fd.read()
    msg3 = add_fake_msg_with_calendar_part(
        db.session, generic_account, ics_data)
    import_attached_events(db.session, generic_account, msg3)

    ev = db.session.query(Event).filter(Event.uid == uid).one()
    assert len(ev.participants) == 2
    for participant in ev.participants:
        if participant['email'] == '*****@*****.**':
            assert participant['status'] == 'maybe'
            assert participant['name'] == 'Inbox Apptest'
        elif participant['email'] == '*****@*****.**':
            assert participant['name'] == 'Karim Hamidou'
            assert participant['status'] == 'yes'

    # Check that we're handling sequence numbers correctly - i.e: an RSVP
    # with a sequence number < to the event's sequence number should be
    # discarded.
    ev.sequence_number += 1

    with open(absolute_path(FIXTURES + 'invite_w_rsvps_4.ics')) as fd:
        ics_data = fd.read()
    msg4 = add_fake_msg_with_calendar_part(
        db.session, generic_account, ics_data)
    # BUG FIX: pass msg4 here (the stale-sequence RSVP), not msg3 — the
    # original re-imported the previous fixture, leaving this path untested.
    import_attached_events(db.session, generic_account, msg4)

    # The stale RSVP must be ignored: statuses are unchanged.
    ev = db.session.query(Event).filter(Event.uid == uid).one()
    assert len(ev.participants) == 2
    for participant in ev.participants:
        if participant['email'] == '*****@*****.**':
            assert participant['status'] == 'maybe'
            assert participant['name'] == 'Inbox Apptest'
        elif participant['email'] == '*****@*****.**':
            assert participant['name'] == 'Karim Hamidou'
            assert participant['status'] == 'yes'