def test_ping(random_heartbeats):
    """The lightweight ping (index check only) conforms to the expected format."""
    ping = get_ping_status(list(range(10)))
    assert isinstance(ping, dict)
    assert sorted(ping.keys()) == sorted(random_heartbeats.keys())
    status = ping[0]
    # Lightweight status objects carry only an id and per-folder entries.
    for required in ("id", "folders"):
        assert hasattr(status, required)
    assert all(folder.alive for folder in status.folders)
def test_ping(random_heartbeats):
    """The lightweight ping (index check only) conforms to the expected format."""
    ping = get_ping_status(range(10))
    assert isinstance(ping, dict)
    assert sorted(ping.keys()) == sorted(random_heartbeats.keys())
    status = ping[0]
    # Lightweight status objects carry only an id and per-folder entries.
    for required in ('id', 'folders'):
        assert hasattr(status, required)
    assert all(folder.alive for folder in status.folders)
def test_ping(random_heartbeats):
    """A full ping exposes liveness/timestamp fields in the expected format."""
    ping = get_ping_status()
    assert isinstance(ping, dict)
    assert sorted(ping.keys()) == sorted(random_heartbeats.keys())
    status = ping[0]
    # Full status objects carry liveness and timing info on top of id/folders.
    for required in ('id', 'alive', 'timestamp', 'folders'):
        assert hasattr(status, required)
    assert status.alive
    assert all(folder.alive for folder in status.folders)
def test_ping_single(random_heartbeats):
    """Pinging a single account id yields a dict whose entry reports alive."""
    ping = get_ping_status(0)
    assert isinstance(ping, dict)
    assert ping[0].alive
def test_ping_single(random_heartbeats):
    """Pinging a one-element id list yields folder statuses that are all alive."""
    ping = get_ping_status([0])
    assert isinstance(ping, dict)
    assert all(folder.alive for folder in ping[0].folders)
def index():
    """Return per-account sync status as JSON.

    Optionally filtered to one namespace via the ``namespace_id`` query arg;
    an unknown namespace_id yields an empty JSON list. For each account the
    response combines folder/calendar state from the database with liveness
    data from the heartbeat store (``get_ping_status``).

    Fix over the previous version: ``events_alive`` and ``email_alive`` are
    now initialized in the no-heartbeat branch. Previously they were left
    unassigned there, so the first account missing from ``heartbeat`` raised
    ``NameError`` when building the response dict, and later ones silently
    reused stale values from a prior loop iteration.
    """
    with global_session_scope() as db_session:
        if "namespace_id" in request.args:
            try:
                namespace = (
                    db_session.query(Namespace)
                    .filter(Namespace.public_id == request.args["namespace_id"])
                    .one()
                )
            except NoResultFound:
                return APIEncoder().jsonify([])
        else:
            namespace = None
        accounts = db_session.query(ImapAccount).with_polymorphic([GenericAccount])
        if namespace:
            accounts = accounts.filter(Account.namespace == namespace)
        else:
            # Get all account IDs that aren't deleted
            account_ids = [
                result[0]
                for result in db_session.query(ImapAccount.id, ImapAccount._sync_status)
                if result[1].get("sync_disabled_reason") != "account deleted"
            ]
            # This is faster than fetching all accounts.
            accounts = accounts.filter(ImapAccount.id.in_(account_ids))
        accounts = list(accounts)
        folder_data = _get_folder_data(db_session, accounts)
        calendar_data = _get_calendar_data(db_session, namespace)
        heartbeat = get_ping_status(account_ids=[acc.id for acc in accounts])
        data = []
        for account in accounts:
            if account.id in heartbeat:
                account_heartbeat = heartbeat[account.id]
                account_folder_data = folder_data[account.id]
                account_calendar_data = calendar_data[account.id]
                events_alive = False
                for folder_status in account_heartbeat.folders:
                    folder_status_id = int(folder_status.id)
                    if folder_status_id in account_folder_data:
                        account_folder_data[folder_status_id].update(
                            {
                                "alive": folder_status.alive,
                                "heartbeat_at": folder_status.timestamp,
                            }
                        )
                    elif folder_status_id == EVENT_SYNC_FOLDER_ID:
                        # Event sync reports under a sentinel folder id rather
                        # than a real folder row.
                        events_alive = folder_status.alive
                email_alive = all(f["alive"] for f in account_folder_data.values())
                # The account is alive only if every sync type it has enabled
                # is itself alive.
                alive = True
                if account.sync_email and not email_alive:
                    alive = False
                if account.sync_events and not events_alive:
                    alive = False
                email_initial_sync = any(
                    f["state"] == "initial" for f in account_folder_data.values()
                )
                events_initial_sync = any(
                    c["state"] == "initial" for c in account_calendar_data
                )
                initial_sync = email_initial_sync or events_initial_sync
                total_uids = sum(
                    f["remote_uid_count"] or 0 for f in account_folder_data.values()
                )
                remaining_uids = sum(
                    f["download_uid_count"] or 0 for f in account_folder_data.values()
                )
                if total_uids:
                    progress = 100.0 / total_uids * (total_uids - remaining_uids)
                else:
                    progress = None
            else:
                # No heartbeat for this account: liveness is unknown, so report
                # None for the per-sync-type flags instead of leaking values
                # from the previous loop iteration (or raising NameError on the
                # first iteration).
                events_alive = None
                email_alive = None
                alive = False
                email_initial_sync = None
                events_initial_sync = None
                initial_sync = None
                progress = None
            sync_status = account.sync_status
            is_running = sync_status["state"] == "running"
            if (
                is_running
                and not sync_status.get("sync_start_time")
                and not sync_status.get("sync_error")
            ):
                sync_status_str = "starting"
            elif is_running and alive:
                if initial_sync:
                    sync_status_str = "initial"
                else:
                    sync_status_str = "running"
            elif is_running:
                # Nylas is syncing, but not all heartbeats are reporting.
                sync_status_str = "delayed"
            else:
                # Nylas is no longer syncing this account.
                sync_status_str = "dead"
            data.append(
                {
                    "account_private_id": account.id,
                    "namespace_private_id": account.namespace.id,
                    "account_id": account.public_id,
                    "namespace_id": account.namespace.public_id,
                    "events_alive": events_alive,
                    "email_alive": email_alive,
                    "alive": alive,
                    "email_initial_sync": email_initial_sync,
                    "events_initial_sync": events_initial_sync,
                    "initial_sync": initial_sync,
                    "provider_name": account.provider,
                    "email_address": account.email_address,
                    "folders": sorted(
                        folder_data[account.id].values(), key=itemgetter("name")
                    ),
                    "calendars": sorted(
                        calendar_data[account.id], key=itemgetter("name")
                    ),
                    "sync_email": account.sync_email,
                    "sync_events": account.sync_events,
                    "sync_status": sync_status_str,
                    "sync_error": sync_status.get("sync_error"),
                    "sync_end_time": sync_status.get("sync_end_time"),
                    "sync_disabled_reason": sync_status.get("sync_disabled_reason"),
                    "sync_host": account.sync_host,
                    "progress": progress,
                    "throttled": account.throttled,
                    "created_at": account.created_at,
                    "updated_at": account.updated_at,
                }
            )
        return APIEncoder().jsonify(data)