async def test_negative(mock_responses):
    """The check fails when a collection signature is older than ``max_age``."""
    url = "http://fake.local/v1"
    with patch_async(f"{MODULE}.fetch_signed_resources", return_value=RESOURCES), patch_async(
        f"{MODULE}.get_signature_age_hours", return_value=5
    ):
        status, data = await run(url, FAKE_AUTH, max_age=4)

    assert status is False
    assert data == {"bid/cid": 5}
async def test_positive(mock_responses):
    """The check succeeds when every signature is younger than ``max_age``."""
    url = "http://fake.local/v1"
    mod = "checks.remotesettings.signatures_age"
    with patch_async(f"{mod}.fetch_signed_resources", return_value=RESOURCES), patch_async(
        f"{mod}.get_signature_age_hours", return_value=3
    ):
        status, data = await run(url, FAKE_AUTH, max_age=4)

    assert status is True
    assert data == {}
async def test_positive(mock_responses):
    """The check succeeds when no collection reports inconsistencies."""
    url = "http://fake.local/v1"
    mod = "checks.remotesettings.collections_consistency"
    with patch_async(f"{mod}.fetch_signed_resources", return_value=RESOURCES), patch_async(
        f"{mod}.has_inconsistencies", return_value=None
    ):
        status, data = await run(url, FAKE_AUTH)

    assert status is True
    assert data == {}
async def test_negative(mock_responses):
    """The check fails and reports the error for each inconsistent collection.

    Fix: removed a stray ``print(data)`` debugging leftover that polluted
    the test output.
    """
    server_url = "http://fake.local/v1"
    m = "checks.remotesettings.collections_consistency"
    with patch_async(f"{m}.fetch_signed_resources", return_value=RESOURCES):
        with patch_async(f"{m}.has_inconsistencies", return_value="Some error"):
            status, data = await run(server_url, FAKE_AUTH)

    assert status is False
    assert data == {
        "blog/articles": "Some error",
        "security/blocklist": "Some error",
    }
async def test_filter_on_action_uptake(mock_aioresponses):
    """Restricting ``sources`` to actions reports uptake per action source."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[{"recipe": RECIPE}],
    )
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="",
            sources=["action"],
            server=NORMANDY_SERVER,
            max_error_percentage=10,
            channels=["release"],
        )

    assert status is False
    assert data == {
        "sources": {
            "action/AddonStudyAction": {
                "error_rate": 10.0,
                "statuses": {"success": 9000, "action_post_execution_error": 1000},
                "ignored": {},
                "min_timestamp": "2019-09-16T00:30:00",
                "max_timestamp": "2019-09-16T00:40:00",
            }
        },
        "min_rate": 10.0,
        "max_rate": 10.0,
        "min_timestamp": "2019-09-16T00:30:00",
        "max_timestamp": "2019-09-16T01:00:00",
    }
async def test_negative(mock_responses):
    """The check fails when the push timestamp differs from Remote Settings'."""
    url = "http://server.local/v1/buckets/monitor/collections/changes/records"
    mock_responses.get(
        url,
        status=200,
        payload={"data": [{"id": "a", "bucket": "main", "last_modified": 1573086234731}]},
    )
    mod = "checks.remotesettings.push_timestamp"
    with patch_async(f"{mod}.get_push_timestamp", return_value="2573086234731"):
        status, data = await run(
            remotesettings_server="http://server.local/v1", push_server=""
        )

    assert status is False
    assert data == {
        "remotesettings": {
            "datetime": "2019-11-07T00:23:54.731000+00:00",
            "timestamp": "1573086234731",
        },
        "push": {
            "datetime": "2051-07-16T02:10:34.731000+00:00",
            "timestamp": "2573086234731",
        },
    }
async def test_negative(mock_aioresponses):
    """Signature validation errors are reported per record id."""
    server_url = "http://fake.local/v1"
    records_url = server_url + RECORDS_URL.format("main", "normandy-recipes")
    mock_aioresponses.get(
        records_url,
        payload={
            "data": [
                {
                    "id": "12",
                    "last_modified": 42,
                    "signature": {"signature": "abc", "x5u": "http://fake-x5u-url"},
                    "recipe": {"id": 12},
                }
            ]
        },
    )
    with patch_async(f"{MODULE}.validate_signature", side_effect=ValueError("boom")):
        status, data = await run(server_url, "normandy-recipes", root_hash="AA")

    assert status is False
    assert data == {"12": "ValueError('boom')"}
async def test_positive_by_channel(mock_aioresponses):
    """Filtering on the release channel hides recipes only seen on beta."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[{"recipe": {"id": 111}}, {"recipe": {"id": 123}}],
    )
    # Ignore the extra recipes reported on beta:
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            server=NORMANDY_SERVER, api_key="", channels=["release"]
        )

    assert status is True
    assert data == {
        "missing": [],
        "extras": [],
        "min_timestamp": "2019-09-16T01:36:12.348",
        "max_timestamp": "2019-09-16T07:24:58.741",
    }
async def test_include_legacy_versions(mock_aioresponses):
    """``include_legacy_versions=True`` drops the minimum version filter."""
    with patch_async(
        f"{MODULE}.fetch_remotesettings_uptake", return_value=FAKE_ROWS
    ) as mocked:
        await run(max_error_percentage=0.1, include_legacy_versions=True)

    assert mocked.call_args_list == [
        mock.call(sources=[], channels=[], period_hours=4, min_version=None)
    ]
async def test_positive(mock_responses):
    """Old work-in-progress and signed collections do not trigger the check."""
    server_url = "http://fake.local/v1"
    wip_url = server_url + COLLECTION_URL.format("bid", "cid")
    mock_responses.get(
        wip_url,
        payload={
            "data": {
                "status": "work-in-progress",
                "last_edit_date": (utcnow() - timedelta(days=10)).isoformat(),
                "last_edit_by": "ldap:[email protected]",
            }
        },
    )
    signed_url = server_url + COLLECTION_URL.format("bid", "cid2")
    mock_responses.get(
        signed_url,
        payload={"data": {"status": "signed", "last_edit_date": "2017-08-01T01:00.000"}},
    )
    with patch_async(f"{MODULE}.fetch_signed_resources", return_value=RESOURCES):
        status, data = await run(server_url, FAKE_AUTH, max_age=25)

    assert status is True
    assert data == {}
async def test_error_rate_with_classifyclient_and_telemetry(mock_aioresponses):
    """Recipes using classify-client or telemetry get their own thresholds."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[
            {
                "recipe": {
                    **RECIPE,
                    "filter_expression": (
                        '(normandy.country in ["US"]) &&'
                        "(normandy.telemetry.main.sum > 0)"
                    ),
                }
            }
        ],
    )
    thresholds = {
        "default": 0.1,
        "with_classify_client": 20,
        "with_telemetry": 30,
    }
    with patch_async(f"{MODULE}.fetch_bigquery", return_value=FAKE_ROWS):
        status, data = await run(
            server=NORMANDY_SERVER,
            max_error_percentage=thresholds,
        )

    assert status is False
    source = data["sources"]["recipe/123"]
    assert source["error_rate"] == 37.5
    assert source["with_telemetry"]
    assert source["with_classify_client"]
async def test_positive_ignore_recents(mock_aioresponses):
    """Recently updated recipes are not reported as discrepancies."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[{"recipe": {"id": 123}}, {"recipe": {"id": 456}}],
    )
    mock_aioresponses.get(
        RECIPE_URL.format(server=NORMANDY_SERVER, id="111"),
        payload={"id": 111, "last_updated": "2019-09-16T02:36:12.348Z"},
    )
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(server=NORMANDY_SERVER, api_key="")

    assert status is True
    assert data == {
        "missing": [],
        "extras": [],
        "min_timestamp": "2019-09-16T01:36:12.348",
        "max_timestamp": "2019-09-16T07:24:58.741",
    }
async def test_exclude_sources():
    """Sources listed in ``ignore_status`` are excluded from the report."""
    rows = FAKE_ROWS + [
        {
            "min_timestamp": "2020-01-17T08:10:00",
            "max_timestamp": "2020-01-17T08:20:00",
            "status": "sync_error",
            "source": "settings-sync",
            "channel": "release",
            "version": "71",
            "total": 50000,
        },
    ]
    with patch_async(f"{MODULE}.fetch_redash", return_value=rows):
        status, data = await run(
            api_key="",
            ignore_status=["settings-sync"],
            max_error_percentage=30,
        )

    assert status is True
    assert data == {
        "sources": {},
        "min_rate": 0.0,
        "max_rate": 20.45,
        "min_timestamp": "2020-01-17T08:10:00",
        "max_timestamp": "2020-01-17T08:30:00",
    }
async def test_filter_on_runner_uptake(mock_aioresponses):
    """Restricting ``sources`` to the runner reports only runner uptake."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[{"recipe": RECIPE}],
    )
    with patch_async(f"{MODULE}.fetch_bigquery", return_value=FAKE_ROWS):
        status, data = await run(
            sources=["runner"],
            server=NORMANDY_SERVER,
            max_error_percentage=0.1,
            channels=["release"],
        )

    assert status is False
    assert data == {
        "sources": {
            "runner": {
                "error_rate": 20.0,
                "statuses": {"success": 2000, "server_error": 500},
                "ignored": {},
                "min_timestamp": "2019-09-16T00:30:00",
                "max_timestamp": "2019-09-16T00:40:00",
            }
        },
        "min_rate": 0.0,
        "max_rate": 20.0,
        "min_timestamp": "2019-09-16T00:30:00",
        "max_timestamp": "2019-09-16T01:00:00",
    }
async def test_negative():
    """The check fails when a source exceeds the maximum error percentage."""
    with patch_async(f"{MODULE}.fetch_bigquery", return_value=FAKE_ROWS):
        status, data = await run(max_error_percentage=0.1, channels=["release"])

    assert status is False
    assert data == {
        "sources": {
            "blocklists/addons": {
                "error_rate": 12.5,
                "statuses": {
                    "success": 20000,
                    "up_to_date": 15000,
                    "network_error": 5000,
                },
                "ignored": {},
                "min_timestamp": "2020-01-17T08:10:00",
                "max_timestamp": "2020-01-17T08:20:00",
            }
        },
        "min_rate": 2.44,
        "max_rate": 12.5,
        "min_timestamp": "2020-01-17T08:10:00",
        "max_timestamp": "2020-01-17T08:30:00",
    }
async def test_ignore_version():
    """Statuses from ignored versions are moved into the ``ignored`` bucket."""
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="",
            max_error_percentage=0.1,
            ignore_versions=[68],
            channels=["release"],
        )

    assert status is False
    assert data == {
        "sources": {
            "blocklists/addons": {
                "error_rate": 12.5,
                "ignored": {"success": 10000},
                "statuses": {
                    "network_error": 5000,
                    "success": 10000,
                    "up_to_date": 15000,
                },
                "min_timestamp": "2020-01-17T08:10:00",
                "max_timestamp": "2020-01-17T08:20:00",
            }
        },
        "min_rate": 2.44,
        "max_rate": 12.5,
        "min_timestamp": "2020-01-17T08:10:00",
        "max_timestamp": "2020-01-17T08:30:00",
    }
async def test_negative(mock_responses, mock_aioresponses):
    """An expired certificate is reported for the affected collection."""
    server_url = "http://fake.local/v1"
    x5u_url = "http://fake-x5u-url/"
    changes_url = server_url + RECORDS_URL.format("monitor", "changes")
    mock_responses.get(
        changes_url,
        payload={
            "data": [
                {"id": "abc", "bucket": "bid", "collection": "cid", "last_modified": 42}
            ]
        },
    )
    mock_aioresponses.get(x5u_url, body=CERT)
    metadata = {"signature": {"x5u": x5u_url, "signature": ""}}
    with patch_async(
        f"{MODULE}.download_collection_data",
        return_value=(metadata, [], 42),
    ):
        status, data = await run(server_url, ["bid"], root_hash="AA")

    assert status is False
    assert data == {
        "bid/cid": "CertificateExpired(datetime.datetime(2019, 11, 11, 22, 44, 31))"
    }
async def test_positive(mock_responses):
    """The check succeeds when every collection signature validates."""
    server_url = "http://fake.local/v1"
    changes_url = server_url + RECORDS_URL.format("monitor", "changes")
    mock_responses.get(
        changes_url,
        payload={
            "data": [
                {"id": "abc", "bucket": "bid", "collection": "cid", "last_modified": 42}
            ]
        },
    )
    mock_responses.get(
        server_url + RECORDS_URL.format("bid", "cid"),
        payload={"data": []},
        headers={"ETag": '"42"'},
    )
    mock_responses.get(
        server_url + COLLECTION_URL.format("bid", "cid"),
        payload={"data": {"signature": {}}},
    )
    with patch_async(f"{MODULE}.validate_signature"):
        status, data = await run(server_url, ["bid"], root_hash="AA")

    assert status is True
    assert data == {}
async def test_positive_with_margin(mock_responses):
    """A small clock difference stays within the allowed margin."""
    server_timestamp = 1573086234731
    server_datetime = utcfromtimestamp(server_timestamp)
    url = "http://server.local/v1/buckets/monitor/collections/changes/records"
    mock_responses.get(
        url,
        status=200,
        payload={
            "data": [
                {"id": "b", "bucket": "main", "last_modified": server_timestamp},
            ]
        },
    )
    with mock.patch(f"{MODULE}.utcnow", return_value=server_datetime):
        with patch_async(f"{MODULE}.get_push_timestamp", return_value=42):
            status, _ = await run(
                remotesettings_server="http://server.local/v1", push_server=""
            )

    assert status is True
async def test_min_total_events(mock_aioresponses):
    """Sources below ``min_total_events`` are skipped entirely."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[{"recipe": {"id": 123}}],
    )
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="",
            server=NORMANDY_SERVER,
            max_error_percentage=0.1,
            min_total_events=40001,
            channels=["release"],
        )

    assert status is True
    assert data == {
        "sources": {},
        "min_rate": None,
        "max_rate": None,
        "min_timestamp": "2019-09-16T00:30:00",
        "max_timestamp": "2019-09-16T01:00:00",
    }
async def test_negative(mock_aioresponses):
    """Recipes published on the server but absent from uptake are missing."""
    mock_aioresponses.get(
        NORMANDY_URL.format(server=NORMANDY_SERVER),
        payload=[
            {"recipe": {"id": 123}},
            {"recipe": {"id": 456}},
            {"recipe": {"id": 789}},
        ],
    )
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(server=NORMANDY_SERVER, api_key="")

    assert status is False
    assert data == {
        "missing": [789],
        "extras": [],
        "min_timestamp": "2019-09-16T01:36:12.348",
        "max_timestamp": "2019-09-16T07:24:58.741",
    }
async def test_retry_fetch_records(mock_responses):
    """Transient 500 responses are retried until the changeset succeeds."""
    server_url = "http://fake.local/v1"
    changes_url = server_url + RECORDS_URL.format("monitor", "changes")
    mock_responses.get(
        changes_url,
        payload={
            "data": [
                {"id": "abc", "bucket": "bid", "collection": "cid", "last_modified": 42}
            ]
        },
    )
    changeset_url = server_url + CHANGESET_URL.format("bid", "cid")
    # Two failures, then a successful changeset response.
    mock_responses.get(changeset_url, status=500)
    mock_responses.get(changeset_url, status=500)
    mock_responses.get(
        changeset_url,
        payload={"metadata": {"signature": {}}, "changes": [], "timestamp": 42},
    )
    with patch_async(f"{MODULE}.validate_signature"):
        status, data = await run(server_url, ["bid"], root_hash="AA")

    assert status is True
async def test_positive_no_data():
    """Without broadcast data for the channel, the check passes with a note."""
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="", max_percentiles={"50": 42}, channels=["aurora"]
        )

    assert status is True
    assert data["percentiles"] == "No broadcast data during this period."
async def test_positive(mock_responses):
    """Latest approval infos are returned keyed by bucket/collection."""
    server_url = "http://fake.local/v1"
    mod = "checks.remotesettings.latest_approvals"
    resources = [
        {
            "last_modified": utcnow().timestamp() * 1000,
            "source": {"bucket": "bid", "collection": "cid"},
        }
    ]
    with patch_async(f"{mod}.fetch_signed_resources", return_value=resources):
        with patch_async(f"{mod}.get_latest_approvals", return_value=INFOS):
            status, data = await run({}, server_url, FAKE_AUTH)

    assert status is True
    assert data == {"bid/cid": INFOS}
async def test_value_count():
    """Only ``value_count`` entries are kept in the failing results."""
    with patch_async(f"{MODULE}.fetch_redash", return_value=INPUT_ROWS):
        success, data = await run(
            api_key="", max_threshold=35, value_count=2, max_over_rate=0.5
        )

    assert success is False
    assert len(data["results"]) == 2
async def test_negative():
    """The check fails when a percentile exceeds its configured maximum."""
    with patch_async(f"{MODULE}.fetch_bigquery", return_value=FAKE_ROWS):
        status, data = await run(max_percentiles={"10": 99})

    assert status is False
    assert data == {
        "min_timestamp": "2019-09-16T02:36:12.348000",
        "max_timestamp": "2019-09-16T06:24:58.741000",
        "percentiles": {"10": {"value": 100, "max": 99}},
    }
async def test_negative():
    """The check fails when the overall error rate exceeds the maximum."""
    with patch_async(f"{MODULE}.fetch_normandy_uptake", return_value=FAKE_ROWS):
        status, data = await run(max_error_percentage=1.0, channels=["release"])

    assert status is False
    assert data == {
        "error_rate": 10.0,
        "min_timestamp": "2019-09-16T01:36:12.348000",
        "max_timestamp": "2019-09-16T07:24:58.741000",
    }
async def test_filter_by_channel():
    """Percentiles are computed over the selected channel only."""
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="", max_percentiles={"10": 99}, channels=["beta"]
        )

    assert status is False
    assert data == {
        "min_timestamp": "2019-09-16T01:00:00.000",
        "max_timestamp": "2019-09-16T02:00:00.000",
        "percentiles": {"10": {"value": 100, "max": 99}},
    }
async def test_filter_by_channel():
    """The error rate is computed over the selected channel only."""
    with patch_async(f"{MODULE}.fetch_redash", return_value=FAKE_ROWS):
        status, data = await run(
            api_key="", max_error_percentage=100.0, channels=["beta"]
        )

    assert status is True
    assert data == {
        "error_rate": 50.0,
        "min_timestamp": "2019-09-16T01:36:12.348",
        "max_timestamp": "2019-09-16T07:24:58.741",
    }
async def test_positive():
    """With a permissive threshold, no source is flagged as failing."""
    with patch_async(f"{MODULE}.fetch_bigquery", return_value=FAKE_ROWS):
        status, data = await run(max_error_percentage=100.0, channels=["release"])

    assert status is True
    assert data == {
        "sources": {},
        "min_rate": 2.44,
        "max_rate": 12.5,
        "min_timestamp": "2020-01-17T08:10:00",
        "max_timestamp": "2020-01-17T08:30:00",
    }