def test_actions_are_claimed(purge_accounts_and_actions, patched_task):
    with session_scope_by_shard_id(0) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(0))
        schedule_test_action(db_session, account)

    with session_scope_by_shard_id(1) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(1))
        schedule_test_action(db_session, account)

    service = SyncbackService(syncback_id=0, process_number=1,
                              total_processes=2, num_workers=2)
    service._restart_workers()
    service._process_log()

    while not service.task_queue.empty():
        gevent.sleep(0)

    with session_scope_by_shard_id(0) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status == 'pending' for a in q)

    with session_scope_by_shard_id(1) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status != 'pending' for a in q)

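# Note: the syncback tests in this listing call a schedule_test_action()
# helper that is defined elsewhere in the test suite. A minimal sketch of
# what such a helper could look like, assuming ActionLog.create() accepts the
# fields used below (an assumption for illustration, not the project's actual
# helper):
def schedule_test_action(db_session, account):
    # Enqueue exactly one pending action log entry for the account's
    # namespace so the syncback service has a single action to claim.
    # 'save_draft', 'message' and record_id=0 are placeholder values.
    log_entry = ActionLog.create(
        action='save_draft', table_name='message', record_id=0,
        namespace_id=account.namespace.id, extra_args={})
    db_session.add(log_entry)
    db_session.commit()
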
def test_external_sync_disabling(monkeypatch, db, mock_queue_client):
    monkeypatch.setattr('psutil.cpu_percent',
                        lambda *args, **kwargs: [10.0, 25.0])
    purge_other_accounts()
    account = add_generic_imap_account(db.session,
                                       email_address='*****@*****.**')
    other_account = add_generic_imap_account(db.session,
                                             email_address='*****@*****.**')
    qp = QueuePopulator(zone='testzone')
    qp.queue_client = mock_queue_client
    s = patched_sync_service(db, mock_queue_client)

    qp.enqueue_new_accounts()
    s.poll()
    s.poll()
    assert len(s.syncing_accounts) == 2

    account.mark_deleted()
    db.session.commit()
    assert account.sync_should_run is False
    assert account._sync_status['sync_disabled_reason'] == 'account deleted'

    account.mark_invalid()
    db.session.commit()
    assert account.sync_should_run is False
    assert account.sync_state == 'invalid'
    assert account._sync_status['sync_disabled_reason'] == \
        'invalid credentials'

    qp.unassign_disabled_accounts()
    s.poll()
    assert s.syncing_accounts == {other_account.id}

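# purge_other_accounts() is another helper these scheduling tests assume but
# that is not part of this listing. A rough sketch, assuming it simply deletes
# every account (optionally keeping one) so each test starts from a clean
# slate (an assumption, not the project's actual implementation):
def purge_other_accounts(default_account=None):
    from inbox.models import Account
    from inbox.models.session import session_scope

    with session_scope() as db_session:
        q = db_session.query(Account)
        if default_account is not None:
            # Keep the account the test is built around; drop the rest.
            q = q.filter(Account.id != default_account.id)
        q.delete(synchronize_session='fetch')
        db_session.commit()
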
def test_actions_are_claimed(purge_accounts_and_actions, patched_worker):
    with session_scope_by_shard_id(0) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(0))
        schedule_test_action(db_session, account)

    with session_scope_by_shard_id(1) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(1))
        schedule_test_action(db_session, account)

    service = SyncbackService(cpu_id=1, total_cpus=2)
    service.workers = set()
    service._process_log()
    gevent.joinall(list(service.workers))

    with session_scope_by_shard_id(0) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status == 'pending' for a in q)

    with session_scope_by_shard_id(1) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status != 'pending' for a in q)

def test_actions_are_claimed(purge_accounts_and_actions, patched_task):
    with session_scope_by_shard_id(0) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(0))
        schedule_test_action(db_session, account)

    with session_scope_by_shard_id(1) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='{}@test.com'.format(1))
        schedule_test_action(db_session, account)

    service = SyncbackService(syncback_id=0, process_number=1,
                              total_processes=2, num_workers=2)
    service._restart_workers()
    service._process_log()

    while not service.task_queue.empty():
        gevent.sleep(0.1)

    with session_scope_by_shard_id(0) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status == 'pending' for a in q)

    with session_scope_by_shard_id(1) as db_session:
        q = db_session.query(ActionLog)
        assert q.count() == 1
        assert all(a.status != 'pending' for a in q)

def test_external_sync_disabling(monkeypatch, db):
    purge_other_accounts()
    account = add_generic_imap_account(db.session,
                                       email_address="*****@*****.**")
    other_account = add_generic_imap_account(db.session,
                                             email_address="*****@*****.**")
    account.sync_host = None
    account.desired_sync_host = None
    other_account.sync_host = None
    other_account.desired_sync_host = None
    db.session.commit()

    s = patched_sync_service(db)
    s.poll_shared_queue({"queue_name": "foo", "id": account.id})
    s.poll_shared_queue({"queue_name": "foo", "id": other_account.id})
    assert len(s.syncing_accounts) == 2

    account.mark_for_deletion()
    db.session.commit()
    assert account.sync_should_run is False
    assert account._sync_status["sync_disabled_reason"] == "account deleted"

    account.mark_invalid()
    db.session.commit()
    assert account.sync_should_run is False
    assert account.sync_state == "invalid"
    assert account._sync_status[
        "sync_disabled_reason"] == "invalid credentials"

    s.poll({"queue_name": "foo"})
    assert s.syncing_accounts == {other_account.id}

def test_external_sync_disabling(db, mock_queue_client):
    purge_other_accounts()
    account = add_generic_imap_account(db.session,
                                       email_address='*****@*****.**')
    other_account = add_generic_imap_account(
        db.session, email_address='*****@*****.**')
    qp = QueuePopulator(zone='testzone')
    qp.queue_client = mock_queue_client
    s = patched_sync_service(db, mock_queue_client)

    qp.enqueue_new_accounts()
    s.poll()
    s.poll()
    assert len(s.syncing_accounts) == 2

    account.mark_deleted()
    db.session.commit()
    assert account.sync_should_run is False
    assert account._sync_status['sync_disabled_reason'] == 'account deleted'

    account.mark_invalid()
    db.session.commit()
    assert account.sync_should_run is False
    assert account.sync_state == 'invalid'
    assert account._sync_status['sync_disabled_reason'] == \
        'invalid credentials'

    qp.unassign_disabled_accounts()
    s.poll()
    assert s.syncing_accounts == {other_account.id}

def test_generic_grouping(db, default_account):
    thread = add_fake_thread(db.session, default_account.namespace.id)
    message = add_fake_message(db.session, default_account.namespace.id,
                               thread, subject="Golden Gate Park next Sat")
    folder = Folder(account=default_account, name='Inbox',
                    canonical_name='inbox')
    ImapUid(message=message, account_id=default_account.id, msg_uid=2222,
            folder=folder)

    thread = add_fake_thread(db.session, default_account.namespace.id)

    account = add_generic_imap_account(db.session)
    message = add_fake_message(db.session, account.namespace.id,
                               thread, subject="Golden Gate Park next Sat")

    thread = fetch_corresponding_thread(db.session,
                                        default_account.namespace.id, message)
    assert thread is None, ("fetch_similar_threads should "
                            "heed namespace boundaries")

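# add_fake_thread() and add_fake_message() come from the shared test
# utilities and are not reproduced in this listing. As a rough idea, a thread
# factory along these lines would satisfy the calls above; the column names
# are inferred from usage and should be treated as an assumption:
def add_fake_thread(db_session, namespace_id):
    from datetime import datetime
    from inbox.models import Thread

    # Minimal Thread row: just enough fields to hang fake messages off of.
    now = datetime.utcnow()
    thread = Thread(namespace_id=namespace_id, subject='fake thread',
                    subjectdate=now, recentdate=now)
    db_session.add(thread)
    db_session.commit()
    return thread
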
def add_fake_imap_account(db_session, provider, email_address, password):
    account = add_generic_imap_account(db_session)
    account.provider = provider
    account.email_address = email_address
    account.imap_password = password
    account.smtp_password = password
    db_session.commit()
    return account

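# Example use of the helper above inside a test body; the provider, address
# and password are made up for illustration:
def test_add_fake_imap_account_example(db):
    account = add_fake_imap_account(db.session, provider='yahoo',
                                    email_address='someone@example.com',
                                    password='hunter2')
    assert account.provider == 'yahoo'
    assert account.email_address == 'someone@example.com'
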
def test_actions_for_invalid_accounts_are_skipped(purge_accounts_and_actions,
                                                  patched_task):
    with session_scope_by_shard_id(0) as db_session:
        account = add_generic_imap_account(db_session,
                                           email_address='*****@*****.**')
        schedule_test_action(db_session, account)
        namespace_id = account.namespace.id
        count = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == namespace_id).count()
        assert account.sync_state != 'invalid'

        another_account = add_generic_imap_account(
            db_session, email_address='*****@*****.**')
        schedule_test_action(db_session, another_account)
        another_namespace_id = another_account.namespace.id
        another_count = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == another_namespace_id).count()
        assert another_account.sync_state != 'invalid'

        account.mark_invalid()
        db_session.commit()

    service = SyncbackService(syncback_id=0, process_number=0,
                              total_processes=2, num_workers=2)
    service._process_log()

    while not service.task_queue.empty():
        gevent.sleep(0)

    with session_scope_by_shard_id(0) as db_session:
        q = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == namespace_id,
            ActionLog.status == 'pending')
        assert q.count() == count

        q = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == another_namespace_id)
        assert q.filter(ActionLog.status == 'pending').count() == 0
        assert q.filter(
            ActionLog.status == 'successful').count() == another_count

def test_namespace_deletion(db, default_account):
    from inbox.models import (Account, Thread, Message, Block, Contact,
                              Event, Transaction)
    from inbox.models.util import delete_namespace

    models = [Thread, Message]

    namespace = default_account.namespace
    namespace_id = namespace.id
    account_id = default_account.id

    account = db.session.query(Account).get(account_id)
    assert account

    thread = add_fake_thread(db.session, namespace_id)
    message = add_fake_message(db.session, namespace_id, thread)

    for m in models:
        c = db.session.query(m).filter(
            m.namespace_id == namespace_id).count()
        print "count for", m, ":", c
        assert c != 0

    fake_account = add_generic_imap_account(db.session)
    fake_account_id = fake_account.id

    assert fake_account_id != account.id and \
        fake_account.namespace.id != namespace_id

    thread = add_fake_thread(db.session, fake_account.namespace.id)
    thread_id = thread.id

    message = add_fake_message(db.session, fake_account.namespace.id, thread)
    message_id = message.id

    # Delete namespace, verify data corresponding to this namespace /only/
    # is deleted
    delete_namespace(account_id, namespace_id)
    db.session.commit()

    account = db.session.query(Account).get(account_id)
    assert not account

    for m in models:
        assert db.session.query(m).filter(
            m.namespace_id == namespace_id).count() == 0

    fake_account = db.session.query(Account).get(fake_account_id)
    assert fake_account

    thread = db.session.query(Thread).get(thread_id)
    message = db.session.query(Message).get(message_id)
    assert thread and message

def test_namespace_deletion(db, default_account):
    from inbox.models import Account, Thread, Message
    from inbox.models.util import delete_namespace

    models = [Thread, Message]

    namespace = default_account.namespace
    namespace_id = namespace.id
    account_id = default_account.id

    account = db.session.query(Account).get(account_id)
    assert account

    thread = add_fake_thread(db.session, namespace_id)
    message = add_fake_message(db.session, namespace_id, thread)

    for m in models:
        c = db.session.query(m).filter(
            m.namespace_id == namespace_id).count()
        print "count for", m, ":", c
        assert c != 0

    fake_account = add_generic_imap_account(db.session)
    fake_account_id = fake_account.id

    assert fake_account_id != account.id and \
        fake_account.namespace.id != namespace_id

    thread = add_fake_thread(db.session, fake_account.namespace.id)
    thread_id = thread.id

    message = add_fake_message(db.session, fake_account.namespace.id, thread)
    message_id = message.id

    # Delete namespace, verify data corresponding to this namespace /only/
    # is deleted
    delete_namespace(account_id, namespace_id)
    db.session.commit()

    account = db.session.query(Account).get(account_id)
    assert not account

    for m in models:
        assert db.session.query(m).filter(
            m.namespace_id == namespace_id).count() == 0

    fake_account = db.session.query(Account).get(fake_account_id)
    assert fake_account

    thread = db.session.query(Thread).get(thread_id)
    message = db.session.query(Message).get(message_id)
    assert thread and message

def test_actions_for_invalid_accounts_are_skipped(purge_accounts_and_actions,
                                                  patched_worker):
    with session_scope_by_shard_id(0) as db_session:
        account = add_generic_imap_account(
            db_session, email_address='*****@*****.**')
        schedule_test_action(db_session, account)
        namespace_id = account.namespace.id
        count = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == namespace_id).count()
        assert account.sync_state != 'invalid'

        another_account = add_generic_imap_account(
            db_session, email_address='*****@*****.**')
        schedule_test_action(db_session, another_account)
        another_namespace_id = another_account.namespace.id
        another_count = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == another_namespace_id).count()
        assert another_account.sync_state != 'invalid'

        account.mark_invalid()
        db_session.commit()

    service = SyncbackService(
        syncback_id=0, process_number=0, total_processes=2)
    service._process_log()

    while len(service.workers) >= 1:
        gevent.sleep(0.1)
    gevent.killall(service.workers)

    with session_scope_by_shard_id(0) as db_session:
        q = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == namespace_id,
            ActionLog.status == 'pending')
        assert q.count() == count

        q = db_session.query(ActionLog).filter(
            ActionLog.namespace_id == another_namespace_id)
        assert q.filter(ActionLog.status == 'pending').count() == 0
        assert q.filter(
            ActionLog.status == 'successful').count() == another_count

def test_namespace_delete_cascade(db, default_account):
    from inbox.models import Account, Message, Namespace, Thread

    models = [Thread, Message]

    namespace = default_account.namespace
    namespace_id = namespace.id
    account_id = default_account.id

    account = db.session.query(Account).get(account_id)
    assert account

    thread = add_fake_thread(db.session, namespace_id)
    add_fake_message(db.session, namespace_id, thread)

    for m in models:
        c = db.session.query(m).filter(m.namespace_id == namespace_id).count()
        print "count for", m, ":", c
        assert c != 0

    fake_account = add_generic_imap_account(db.session)
    fake_account_id = fake_account.id

    assert fake_account_id != account.id and \
        fake_account.namespace.id != namespace_id

    thread = add_fake_thread(db.session, fake_account.namespace.id)
    add_fake_message(db.session, fake_account.namespace.id, thread)

    assert (
        len(db.session.query(Namespace).filter(
            Namespace.id == namespace_id).all()) > 0
    )

    # This test is separate from test_namespace_deletion because we want to
    # do a raw SQLAlchemy delete rather than using delete_namespace, which
    # does a bunch of extra work to ensure that objects associated with a
    # Namespace are actually deleted.
    db.session.query(Namespace).filter(Namespace.id == namespace_id).delete()
    db.session.commit()

    assert (
        len(db.session.query(Namespace).filter(
            Namespace.id == namespace_id).all()) == 0
    )

def test_actions_claimed_by_a_single_service(purge_accounts_and_actions,
                                             patched_worker):
    actionlogs = []
    for key in (0, 1):
        with session_scope_by_shard_id(key) as db_session:
            account = add_generic_imap_account(
                db_session, email_address='{}@test.com'.format(key))
            schedule_test_action(db_session, account)
            actionlogs += [db_session.query(ActionLog).one().id]

    services = []
    for cpu_id in (0, 1):
        service = SyncbackService(cpu_id=cpu_id, total_cpus=2)
        service.workers = set()
        service._process_log()
        services.append(service)

    for i, service in enumerate(services):
        assert len(service.workers) == 1
        assert list(service.workers)[0].action_log_id == actionlogs[i]
        gevent.joinall(list(service.workers))

def test_actions_claimed_by_a_single_service(purge_accounts_and_actions,
                                             patched_task):
    actionlogs = []
    for key in (0, 1):
        with session_scope_by_shard_id(key) as db_session:
            account = add_generic_imap_account(
                db_session, email_address='{}@test.com'.format(key))
            schedule_test_action(db_session, account)
            actionlogs += [db_session.query(ActionLog).one().id]

    services = []
    for process_number in (0, 1):
        service = SyncbackService(syncback_id=0,
                                  process_number=process_number,
                                  total_processes=2, num_workers=2)
        service._process_log()
        services.append(service)

    for i, service in enumerate(services):
        assert service.task_queue.qsize() == 1
        assert service.task_queue.peek().action_log_ids() == [actionlogs[i]]