def test_expanded_message(stub_message, api_client):
    def _check_json_message(msg_dict):
        assert 'body' in msg_dict
        assert msg_dict['object'] == 'message'
        assert msg_dict['thread_id'] == stub_message.thread.public_id

        assert isinstance(msg_dict['headers'], dict)
        assert 'In-Reply-To' in msg_dict['headers']
        assert 'References' in msg_dict['headers']
        assert 'Message-Id' in msg_dict['headers']

        valid_keys = ['account_id', 'to', 'from', 'files', 'unread',
                      'date', 'snippet']
        assert all(x in msg_dict for x in valid_keys)

    # /message/<message_id>
    resp = api_client.get_raw(
        '/messages/{}?view=expanded'.format(stub_message.public_id))
    assert resp.status_code == 200
    resp_dict = json.loads(resp.data)
    _check_json_message(resp_dict)

    # /messages/
    resp = api_client.get_raw('/messages/?view=expanded')
    assert resp.status_code == 200
    resp_dict = json.loads(resp.data)

    for message_json in resp_dict:
        if message_json['id'] == stub_message.public_id:
            _check_json_message(message_json)

def test_search_endpoints(db, api_client, token_manager, search_response,
                          default_account):
    # Message, thread search succeeds.
    for endpoint in ("messages", "threads"):
        r = api_client.get_raw("/{}/search?q=queryme".format(endpoint))
        assert r.status_code == 200

    default_account.sync_state = "invalid"
    db.session.commit()

    # Message, thread search on an invalid account fails with an HTTP 403.
    for endpoint in ("messages", "threads"):
        r = api_client.get_raw("/{}/search?q=queryme".format(endpoint))
        assert r.status_code == 403

def test_expanded_threads(stub_message, api_client, api_version):
    def _check_json_thread(resp_dict):
        assert 'message_ids' not in resp_dict
        assert 'messages' in resp_dict
        assert 'drafts' in resp_dict
        assert len(resp_dict['participants']) == 3
        assert len(resp_dict['messages']) == 2
        assert len(resp_dict['drafts']) == 1

        for msg_dict in resp_dict['messages']:
            assert 'body' not in msg_dict
            assert msg_dict['object'] == 'message'
            assert msg_dict['thread_id'] == stub_message.thread.public_id
            valid_keys = ['account_id', 'to', 'from', 'files', 'unread',
                          'date', 'snippet']
            assert all(x in msg_dict for x in valid_keys)

        for draft in resp_dict['drafts']:
            assert 'body' not in draft
            assert draft['object'] == 'draft'
            assert draft['thread_id'] == stub_message.thread.public_id
            valid_keys = ['account_id', 'to', 'from', 'files', 'unread',
                          'snippet', 'date', 'version',
                          'reply_to_message_id']
            assert all(x in draft for x in valid_keys)

    headers = {'Api-Version': api_version}

    # /threads/<thread_id>
    resp = api_client.get_raw(
        '/threads/{}?view=expanded'.format(stub_message.thread.public_id),
        headers=headers)
    assert resp.status_code == 200
    resp_dict = json.loads(resp.data)
    _check_json_thread(resp_dict)

    # /threads/
    resp = api_client.get_raw('/threads/?view=expanded', headers=headers)
    assert resp.status_code == 200
    resp_dict = json.loads(resp.data)

    for thread_json in resp_dict:
        if thread_json['id'] == stub_message.thread.public_id:
            _check_json_thread(thread_json)

def test_invalid_gmail_account_search(db, api_client, default_account,
                                      invalid_gmail_token,
                                      patch_gmail_search_response,
                                      sorted_gmail_messages, is_streaming):
    if is_streaming:
        response = api_client.get_raw('/messages/search/streaming?'
                                      'q=blah%20blah%20blah')
    else:
        response = api_client.get_raw('/messages/search?'
                                      'q=blah%20blah%20blah')

    assert response.status_code == 403
    assert ("This search can't be performed because the account's "
            "credentials are out of date.") in \
        json.loads(response.data)['message']

def test_thread_count(db, api_client, default_account):
    date1 = datetime.datetime(2015, 1, 1, 0, 0, 0)
    date2 = datetime.datetime(2012, 1, 1, 0, 0, 0)
    date3 = datetime.datetime(2010, 1, 1, 0, 0, 0)
    date4 = datetime.datetime(2009, 1, 1, 0, 0, 0)
    date5 = datetime.datetime(2008, 1, 1, 0, 0, 0)

    thread1 = add_fake_thread(db.session, default_account.namespace.id)
    thread2 = add_fake_thread(db.session, default_account.namespace.id)

    test_subject = "test_thread_view_count_with_category"

    for thread in [thread1, thread2]:
        add_fake_message(db.session, default_account.namespace.id, thread,
                         subject=test_subject, received_date=date1)
        add_fake_message(db.session, default_account.namespace.id, thread,
                         subject=test_subject, received_date=date2,
                         add_sent_category=True)
        add_fake_message(db.session, default_account.namespace.id, thread,
                         subject=test_subject, received_date=date3)
        add_fake_message(db.session, default_account.namespace.id, thread,
                         subject=test_subject, received_date=date4,
                         add_sent_category=True)
        add_fake_message(db.session, default_account.namespace.id, thread,
                         subject=test_subject, received_date=date5)

    resp = api_client.get_raw('/threads/?view=count&in=sent')
    assert resp.status_code == 200
    threads = json.loads(resp.data)
    assert threads['count'] == 2

def test_thread_sent_recent_date(db, api_client, default_account):
    date1 = datetime.datetime(2015, 1, 1, 0, 0, 0)
    date2 = datetime.datetime(2012, 1, 1, 0, 0, 0)
    date3 = datetime.datetime(2010, 1, 1, 0, 0, 0)
    date4 = datetime.datetime(2009, 1, 1, 0, 0, 0)
    date5 = datetime.datetime(2008, 1, 1, 0, 0, 0)

    thread1 = add_fake_thread(db.session, default_account.namespace.id)

    test_subject = "test_thread_sent_recent_date"

    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject=test_subject, received_date=date1)
    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject=test_subject, received_date=date2,
                     add_sent_category=True)
    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject=test_subject, received_date=date3)
    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject=test_subject, received_date=date4,
                     add_sent_category=True)
    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject=test_subject, received_date=date5)

    resp = api_client.get_raw('/threads/')
    assert resp.status_code == 200
    threads = json.loads(resp.data)

    for thread in threads:  # should only be one
        assert datetime.datetime.fromtimestamp(
            thread['last_message_sent_timestamp']) == date2

def test_thread_received_recent_date(db, api_client, default_account):
    date1 = datetime.datetime(2015, 1, 1, 0, 0, 0)
    date2 = datetime.datetime(2012, 1, 1, 0, 0, 0)

    thread1 = add_fake_thread(db.session, default_account.namespace.id)

    date_dict = dict()

    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject="Test Thread 1", received_date=date1,
                     add_sent_category=True)
    add_fake_message(db.session, default_account.namespace.id, thread1,
                     subject="Test Thread 1", received_date=date2)
    date_dict["Test Thread 1"] = date2

    thread2 = add_fake_thread(db.session, default_account.namespace.id)
    add_fake_message(db.session, default_account.namespace.id, thread2,
                     subject="Test Thread 2", received_date=date1,
                     add_sent_category=True)
    date_dict["Test Thread 2"] = date1

    resp = api_client.get_raw('/threads/')
    assert resp.status_code == 200
    threads = json.loads(resp.data)

    for thread in threads:
        assert date_dict[thread['subject']] == \
            datetime.datetime.fromtimestamp(
                thread['last_message_received_timestamp'])

def test_api_expand_recurring_message(db, api_client, message,
                                      recurring_event):
    # This is a regression test for https://phab.nylas.com/T3556
    # ("InflatedEvent should not be committed" exception in the API).
    event = recurring_event
    event.message = message
    db.session.commit()

    events = api_client.get_data('/events?expand_recurring=false')
    assert len(events) == 1

    # Make sure the recurrence info is on the recurring event.
    for e in events:
        if e['title'] == 'recurring-weekly':
            assert e.get('recurrence') is not None
            assert e.get('message_id') is not None

    r = api_client.get_raw('/events?expand_recurring=true')
    assert r.status_code == 200

    all_events = api_client.get_data('/events?expand_recurring=true')
    assert len(all_events) != 0

    for event in all_events:
        assert event['master_event_id'] is not None
        assert 'message_id' not in event

def test_direct_fetching(api_client, db, message, fake_attachment,
                         monkeypatch):
    # Mark a file as missing and check that we try to
    # fetch it from the remote provider.
    get_mock = mock.Mock(return_value=None)
    monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore',
                        get_mock)

    save_mock = mock.Mock()
    monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore',
                        save_mock)

    # Mock the request to return the contents of an actual email.
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        '..', 'data',
                        'raw_message_with_filename_attachment.txt')
    data = ""
    with open(path) as fd:
        data = fd.read()

    raw_mock = mock.Mock(return_value=data)
    monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents',
                        raw_mock)

    resp = api_client.get_raw(
        '/files/{}/download'.format(fake_attachment.block.public_id))

    for m in [get_mock, save_mock, raw_mock]:
        assert m.called

    # Check that we got back the right data, with the right headers.
    assert resp.headers['Content-Disposition'] == \
        'attachment; filename=zambla.txt'
    assert resp.data.decode("utf8") == u'Chuis pas rassur\xe9'

def test_direct_fetching(stub_message_from_raw, api_client, mime_message,
                         monkeypatch):
    # Mark a message as missing and check that we try to
    # fetch it from the remote provider.
    get_mock = mock.Mock(return_value=None)
    monkeypatch.setattr('inbox.util.blockstore.get_from_blockstore',
                        get_mock)

    save_mock = mock.Mock()
    monkeypatch.setattr('inbox.util.blockstore.save_to_blockstore',
                        save_mock)

    raw_mock = mock.Mock(return_value='Return contents')
    monkeypatch.setattr('inbox.s3.backends.gmail.get_gmail_raw_contents',
                        raw_mock)

    full_path = '/messages/{}'.format(stub_message_from_raw.public_id)
    resp = api_client.get_raw(full_path,
                              headers={'Accept': 'message/rfc822'})

    for m in [get_mock, save_mock, raw_mock]:
        assert m.called

    assert resp.data == 'Return contents'

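# A minimal sketch of the fallback path the two direct-fetching tests
# above exercise. The helper name and control flow here are assumptions
# inferred from which call sites the tests patch and assert on, not the
# real sync-engine internals: on a blockstore miss, raw contents are
# re-fetched from the provider and written back.
def _fetch_raw_with_fallback(message):
    from inbox.util import blockstore
    from inbox.s3.backends.gmail import get_gmail_raw_contents

    data = blockstore.get_from_blockstore(message.data_sha256)
    if data is None:
        # Cache miss: fetch from the remote provider, then repopulate
        # the blockstore so the next read is local.
        data = get_gmail_raw_contents(message)
        blockstore.save_to_blockstore(message.data_sha256, data)
    return data
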
def test_rfc822_format(stub_message_from_raw, api_client, mime_message):
    """ Test the API response to retrieve raw message contents. """
    full_path = '/messages/{}'.format(stub_message_from_raw.public_id)

    resp = api_client.get_raw(full_path,
                              headers={'Accept': 'message/rfc822'})
    assert resp.data == get_from_blockstore(stub_message_from_raw.data_sha256)

def test_empty_response_when_latest_cursor_given(db, api_client,
                                                 default_namespace):
    cursor = get_cursor(api_client, int(time.time() + 22),
                        default_namespace)
    url = url_concat('/delta/streaming', {'timeout': .1,
                                          'cursor': cursor})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    assert r.data.strip() == ''

def test_response_when_old_cursor_given(db, api_client,
                                        default_namespace):
    url = url_concat('/delta/streaming', {'timeout': .1,
                                          'cursor': '0'})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    responses = r.data.split('\n')
    for response_string in responses:
        if response_string:
            validate_response_format(response_string)

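# A hedged sketch of what a validate_response_format-style helper could
# check for the newline-delimited JSON stream above. The exact delta
# schema is an assumption based on the fields these tests consume
# elsewhere ('object' and 'attributes') plus the documented delta
# envelope; it is not a verbatim copy of the real helper.
def _validate_delta_format(response_string):
    delta = json.loads(response_string)
    assert isinstance(delta, dict)
    # Assumed envelope keys for a streamed delta.
    for key in ('cursor', 'object', 'event', 'attributes'):
        assert key in delta
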
def label_client(db, gmail_account):
    api_client = new_api_client(db, gmail_account.namespace)
    # Whereas calling generic_account always makes a new IMAP account,
    # calling gmail_account checks first to see if there's an existing
    # Gmail account and uses it if so. This can cause namespace
    # conflicts if a label is "created" more than once. Since
    # labels can't be deleted and then re-created, this fixture only
    # makes a new label if there are no existing labels.
    g_data = api_client.get_raw('/labels/')
    if not json.loads(g_data.data):
        api_client.post_data('/labels/', {"display_name": "Test_Label"})
    return api_client

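# Hypothetical usage of the fixture above (illustrative only, not a
# test from the original suite): any test taking label_client can rely
# on at least one label existing.
def test_at_least_one_label(label_client):
    labels = json.loads(label_client.get_raw('/labels/').data)
    assert len(labels) >= 1
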
def test_expanded_view(db, api_client, thread, message,
                       default_namespace):
    url = url_concat('/delta/streaming',
                     {'timeout': .1, 'cursor': '0',
                      'include_types': 'message,thread',
                      'view': 'expanded'})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    responses = r.data.split('\n')
    parsed_responses = [json.loads(resp) for resp in responses
                        if resp != '']
    for delta in parsed_responses:
        if delta['object'] == 'message':
            assert 'headers' in delta['attributes']
        elif delta['object'] == 'thread':
            assert 'messages' in delta['attributes']

def test_read_endpoints(db, setup_account, api_client, default_account):
    # Read operations succeed.
    for resource, public_id in setup_account.items():
        endpoint = "/{}s".format(resource)
        r = api_client.get_raw(endpoint)
        assert r.status_code == 200

        read_endpoint = "{}/{}".format(endpoint, public_id)
        r = api_client.get_raw(read_endpoint)
        assert r.status_code == 200

    default_account.sync_state = "invalid"
    db.session.commit()

    # Read operations on an invalid account also succeed.
    for resource, public_id in setup_account.items():
        endpoint = "/{}s".format(resource)
        r = api_client.get_raw(endpoint)
        assert r.status_code == 200

        read_endpoint = "{}/{}".format(endpoint, public_id)
        r = api_client.get_raw(read_endpoint)
        assert r.status_code == 200

def test_exclude_and_include_object_types(db, api_client, thread,
                                          default_namespace):
    add_fake_message(db.session, default_namespace.id, thread,
                     from_addr=[('Bob', '*****@*****.**')])

    # Check that we do get message and contact changes by default.
    url = url_concat('/delta/streaming', {'timeout': .1, 'cursor': '0'})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    responses = r.data.split('\n')
    parsed_responses = [json.loads(resp) for resp in responses
                        if resp != '']
    assert any(resp['object'] == 'message' for resp in parsed_responses)
    assert any(resp['object'] == 'contact' for resp in parsed_responses)

    # And check that we don't get message/contact changes if we exclude
    # them.
    url = url_concat('/delta/streaming',
                     {'timeout': .1, 'cursor': '0',
                      'exclude_types': 'message,contact'})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    responses = r.data.split('\n')
    parsed_responses = [json.loads(resp) for resp in responses
                        if resp != '']
    assert not any(resp['object'] == 'message'
                   for resp in parsed_responses)
    assert not any(resp['object'] == 'contact'
                   for resp in parsed_responses)

    # And check we only get message objects if we use include_types.
    url = url_concat('/delta/streaming',
                     {'timeout': .1, 'cursor': '0',
                      'include_types': 'message'})
    r = api_client.get_raw(url)
    assert r.status_code == 200
    responses = r.data.split('\n')
    parsed_responses = [json.loads(resp) for resp in responses
                        if resp != '']
    assert all(resp['object'] == 'message' for resp in parsed_responses)

def test_sender_and_participants(stub_message, api_client, api_version):
    headers = {'Api-Version': api_version}
    resp = api_client.get_raw('/threads/{}'
                              .format(stub_message.thread.public_id),
                              headers=headers)
    assert resp.status_code == 200
    resp_dict = json.loads(resp.data)
    participants = resp_dict['participants']
    assert len(participants) == 3

    # Not expanded, should only return IDs.
    assert 'message' not in resp_dict
    assert 'drafts' not in resp_dict

def test_longpoll_delta_timeout(db, api_client, default_namespace):
    test_timeout = 2
    cursor = get_cursor(api_client, int(time.time() + 22),
                        default_namespace)
    url = url_concat('/delta/longpoll', {'timeout': test_timeout,
                                         'cursor': cursor})
    start_time = time.time()
    resp = api_client.get_raw(url)
    end_time = time.time()
    assert resp.status_code == 200

    assert end_time - start_time - test_timeout < GEVENT_EPSILON
    parsed_responses = json.loads(resp.data)
    assert len(parsed_responses['deltas']) == 0
    assert type(parsed_responses['deltas']) == list
    assert parsed_responses['cursor_start'] == cursor
    assert parsed_responses['cursor_end'] == cursor

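# A minimal sketch of the get_cursor helper the delta tests lean on,
# assuming it wraps the /delta/generate_cursor endpoint. The request
# body shape ({'start': timestamp}) is an assumption based on the
# public delta API, not a verbatim copy of the real test helper.
def get_cursor(api_client, timestamp, namespace):
    resp = api_client.post_data('/delta/generate_cursor',
                                {'start': timestamp})
    return json.loads(resp.data)['cursor']
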
def test_download(api_client, uploaded_file_ids, filename):
    # See comment in uploaded_file_ids()
    original_filename = filename
    if filename == 'piece-jointe.jpg':
        filename = u'pièce-jointe.jpg'
    elif filename == 'andra-moi-ennepe.txt':
        filename = u'ἄνδρα μοι ἔννεπε'
    elif filename == 'long-non-ascii-filename.txt':
        filename = 100 * u'μ'

    in_file = api_client.get_data(u'/files?filename={}'.format(filename))[0]
    data = api_client.get_raw('/files/{}/download'.format(in_file['id'])).data

    path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        '..', 'data', original_filename.encode('utf-8'))
    local_data = open(path, 'rb').read()
    local_md5 = md5.new(local_data).digest()
    dl_md5 = md5.new(data).digest()
    assert local_md5 == dl_md5

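# The md5 module used above is Python 2-only. A self-contained
# equivalent of the checksum comparison using hashlib (available on
# both Python 2 and 3) would be:
import hashlib

def _same_contents(local_bytes, downloaded_bytes):
    # Compare digests rather than raw bytes, mirroring the test above.
    return (hashlib.md5(local_bytes).digest() ==
            hashlib.md5(downloaded_bytes).digest())
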
def test_handle_not_found_calendar(api_client):
    resp_data = api_client.get_raw('/calendars/foo')
    assert resp_data.status_code == 404

def test_strict_argument_parsing(api_client):
    r = api_client.get_raw('/threads?foo=bar')
    assert r.status_code == 400

def test_contact_groups(db, api_client, default_namespace):
    # Clear cached data (if it exists).
    namespace_id = default_namespace.id
    try:
        cached_data = db.session.query(DataProcessingCache) \
            .filter(DataProcessingCache.namespace_id == namespace_id).one()
        cached_data.contact_groups_last_updated = None
        db.session.add(cached_data)
        db.session.commit()
    except NoResultFound:
        pass

    # Send some emails.
    namespace_email = default_namespace.email_address
    me = ('me', namespace_email)
    recipients = ([[('a', '*****@*****.**'),
                    ('b', '*****@*****.**'),
                    ('c', '*****@*****.**')]] * 8 +
                  [[('b', '*****@*****.**'),
                    ('c', '*****@*****.**'),
                    ('d', '*****@*****.**')]] * 8 +
                  [[('d', '*****@*****.**'),
                    ('e', '*****@*****.**'),
                    ('f', '*****@*****.**')]] * 8 +
                  [[('g', '*****@*****.**'),
                    ('h', '*****@*****.**'),
                    ('i', '*****@*****.**'),
                    ('j', '*****@*****.**')]] * 5 +
                  [[('g', '*****@*****.**'),
                    ('h', '*****@*****.**'),
                    ('i', '*****@*****.**')]] * 2 +
                  [[('k', '*****@*****.**'),
                    ('l', '*****@*****.**')]] * 3)

    for recipients_list in recipients:
        fake_thread = add_fake_thread(db.session, namespace_id)
        add_fake_message(db.session, namespace_id, fake_thread,
                         subject='Froop', from_addr=[me],
                         to_addr=recipients_list,
                         add_sent_category=True)

    # Check contact groups.
    resp = api_client.get_raw('/groups/intrinsic?force_recalculate=true')
    assert resp.status_code == 200

    groups_scores = {g: s for (g, s) in json.loads(resp.data)}
    groups = ['[email protected], [email protected], [email protected], [email protected]',
              '[email protected], [email protected], [email protected]',
              '[email protected], [email protected], [email protected], [email protected]',
              '[email protected], [email protected]']
    for g in groups:
        assert g in groups_scores

    # Make sure it works when we do it again.
    resp = api_client.get_raw('/groups/intrinsic')
    assert resp.status_code == 200

    groups_scores = {g: s for (g, s) in json.loads(resp.data)}
    for g in groups:
        assert g in groups_scores

    try:
        cached_data = db.session.query(DataProcessingCache) \
            .filter(DataProcessingCache.namespace_id == namespace_id).one()
        assert cached_data.contact_groups_last_updated is not None
    except (NoResultFound, AssertionError):
        assert False, "Contact groups not cached"

def test_contact_rankings(db, api_client, default_namespace):
    # Clear cached data (if it exists).
    namespace_id = default_namespace.id
    try:
        cached_data = db.session.query(DataProcessingCache) \
            .filter(DataProcessingCache.namespace_id == namespace_id).one()
        cached_data.contact_rankings_last_updated = None
        db.session.add(cached_data)
        db.session.commit()
    except NoResultFound:
        pass

    # Send some emails.
    namespace_email = default_namespace.email_address
    me = ('me', namespace_email)
    recipients = ([[('first', '*****@*****.**')]] * 8 +
                  [[('second', '*****@*****.**')]] * 4 +
                  [[('third', '*****@*****.**')]] +
                  [[('third', '*****@*****.**'),
                    ('fourth', '*****@*****.**')]])

    for recipients_list in recipients:
        fake_thread = add_fake_thread(db.session, namespace_id)
        add_fake_message(db.session, namespace_id, fake_thread,
                         subject='Froop', from_addr=[me],
                         to_addr=recipients_list,
                         add_sent_category=True)

    # Check contact rankings.
    resp = api_client.get_raw('/contacts/rankings?force_recalculate=true')
    assert resp.status_code == 200

    emails_scores = {e: s for (e, s) in json.loads(resp.data)}
    emails = ['*****@*****.**', '*****@*****.**',
              '*****@*****.**', '*****@*****.**']
    for email in emails:
        assert email in emails_scores

    for e1, e2 in zip(emails, emails[1:]):
        assert emails_scores[e1] > emails_scores[e2]

    # Make sure it works if we call it again!
    resp = api_client.get_raw('/contacts/rankings')
    assert resp.status_code == 200

    emails_scores = {e: s for (e, s) in json.loads(resp.data)}
    for email in emails:
        assert email in emails_scores

    for e1, e2 in zip(emails, emails[1:]):
        assert emails_scores[e1] > emails_scores[e2]

    try:
        cached_data = db.session.query(DataProcessingCache) \
            .filter(DataProcessingCache.namespace_id == namespace_id).one()
        assert cached_data.contact_rankings_last_updated is not None
    except (NoResultFound, AssertionError):
        assert False, "Contact rankings not cached"
