def test_multipart_encode_with_files():
    """Verify multipart_encode produces exact headers and body for a payload with a file."""
    raw_crash = {
        'ProjectName': 'Test',
        'Version': '1.0',
        # The io.BytesIO is a file-like object, so this will end up as a file.
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    }
    # Fixed boundary so the encoded output is deterministic and comparable below
    body, headers = multipart_encode(raw_crash, boundary='socorrobound1234567')
    assert headers['Content-Type'] == 'multipart/form-data; boundary=socorrobound1234567'
    assert headers['Content-Length'] == '431'
    # Compare the encoded body byte-for-byte: plain fields get a text/plain
    # part, the file tuple gets an octet-stream part with a filename
    assert (
        body == (
            b'--socorrobound1234567\r\n'
            b'Content-Disposition: form-data; name="ProjectName"\r\n'
            b'Content-Type: text/plain; charset=utf-8\r\n'
            b'\r\n'
            b'Test\r\n'
            b'--socorrobound1234567\r\n'
            b'Content-Disposition: form-data; name="Version"\r\n'
            b'Content-Type: text/plain; charset=utf-8\r\n'
            b'\r\n'
            b'1.0\r\n'
            b'--socorrobound1234567\r\n'
            b'Content-Disposition: form-data; name="upload_file_minidump"; filename="fakecrash.dump"\r\n'
            b'Content-Type: application/octet-stream\r\n'
            b'\r\n'
            b'abcd1234\r\n'
            b'--socorrobound1234567--\r\n'
        )
    )
def test_extract_payload_2_dumps(self, request_generator):
    """extract_payload separates two minidump files from the annotation fields."""
    data, headers = multipart_encode({
        "ProductName": "Firefox",
        "Version": "1",
        "upload_file_minidump": ("fakecrash.dump", io.BytesIO(b"deadbeef")),
        "upload_file_minidump_flash1": (
            "fakecrash2.dump",
            io.BytesIO(b"abcd1234"),
        ),
    })
    req = request_generator(method="POST", path="/submit", headers=headers, body=data)

    bsp = BreakpadSubmitterResource(self.empty_config)
    # Annotations end up in the raw crash; "payload" records how it was submitted
    expected_raw_crash = {
        "ProductName": "Firefox",
        "Version": "1",
        "payload": "multipart",
    }
    # Both dump files come back keyed by field name
    expected_dumps = {
        "upload_file_minidump": b"deadbeef",
        "upload_file_minidump_flash1": b"abcd1234",
    }
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_legacy_processing(self, client, loggingmock):
    """A crash with valid legacy_processing/throttle_rate is matched by FROM_CRASHID."""
    # NOTE(willkg): This encodes throttle result which is 0 (ACCEPT)
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    data, headers = multipart_encode({
        'uuid': crash_id,
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
        'legacy_processing': '0',  # ACCEPT
        'throttle_rate': '100',
    })

    with loggingmock(['antenna']) as lm:
        result = client.simulate_post(
            '/submit',
            headers=headers,
            body=data
        )
        assert result.status_code == 200

        # The throttler should short-circuit on the existing crash id
        assert lm.has_record(
            name='antenna.breakpad_resource',
            levelname='INFO',
            msg_contains='%s: matched by FROM_CRASHID; returned ACCEPT' % crash_id
        )
def test_extract_payload_compressed(self, request_generator):
    """extract_payload decompresses a gzipped multipart body."""
    data, headers = multipart_encode({
        'ProductName': 'Firefox',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })
    # gzip the body and advertise it via the Content-Encoding header
    data = compress(data)
    headers['Content-Encoding'] = 'gzip'

    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=data,
    )

    bsp = BreakpadSubmitterResource(self.empty_config)
    expected_raw_crash = {
        'ProductName': 'Firefox',
        'Version': '1.0',
    }
    expected_dumps = {'upload_file_minidump': b'abcd1234'}
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_legacy_processing_bad_values(self, legacy, throttle_rate, client, loggingmock):
    """Invalid legacy_processing/throttle_rate values fall back to normal throttling."""
    data, headers = multipart_encode({
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
        # These are invalid values for legacy_processing and throttle_rate
        'legacy_processing': legacy,
        'throttle_rate': throttle_rate
    })

    with loggingmock(['antenna']) as lm:
        result = client.simulate_post(
            '/submit',
            headers=headers,
            body=data
        )
        assert result.status_code == 200

        # Verify it didn't match with ALREADY_THROTTLED and instead matched
        # with a rule indicating it went through throttling.
        assert lm.has_record(
            name='antenna.breakpad_resource',
            levelname='INFO',
            msg_contains='matched by is_thunderbird_seamonkey; returned ACCEPT'
        )
def _test_crash_size(self, posturl, size, crash_generator):
    """Submit a crash payload of exactly ``size`` bytes; return the HTTP status.

    Returns 413 when the server hangs up mid-request (treated as "payload
    too large"), otherwise the actual response status code.

    :raises ValueError: if the generated payload is not exactly ``size`` bytes
    """
    crash_payload = self._generate_sized_crash(size, crash_generator)
    payload, headers = mini_poster.multipart_encode(crash_payload)

    if len(payload) != size:
        # BUGFIX: the original passed the args as a tuple to ValueError
        # (logging-style), so the message never got %-formatted.
        raise ValueError('payload size %s' % len(payload))

    try:
        resp = requests.post(posturl, headers=headers, data=payload)
        return resp.status_code
    except requests.exceptions.ConnectionError as exc:
        # NOTE(willkg): requests uses httplib which raises an exception if
        # the connection is closed, but doesn't read the HTTP response that
        # might be there. Thus requests never gets the HTTP response.
        #
        # So the best we can test for at this time without a ton of work is
        # to make sure we get a ConnectionError with a broken pipe.
        #
        # https://github.com/kennethreitz/requests/issues/2422
        if 'Broken pipe' in str(exc):
            # Treating this as a 413
            return 413
        raise
    # NOTE: the original had an unreachable "return 200" here; every path
    # above either returns or raises, so it has been removed.
def test_retry(self, client, loggingmock):
    """A failing crash storage makes the crashmover retry and log each attempt."""
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    data, headers = multipart_encode({
        'uuid': crash_id,
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Swap in a crash storage class that always raises on save
    client.rebuild_app({
        'CRASHSTORAGE_CLASS': BadCrashStorage.__module__ + '.' + BadCrashStorage.__name__,
    })

    with loggingmock(['antenna']) as lm:
        result = client.simulate_post('/submit', headers=headers, body=data)
        assert result.status_code == 200

        # The storage is bad, so this should raise errors and then log something
        client.join_app()

        # We're using BadCrashStorage so the crashmover should retry 20
        # times logging a message each time and then give up
        for i in range(1, MAX_ATTEMPTS):
            assert lm.has_record(
                name='antenna.breakpad_resource',
                levelname='ERROR',
                msg_contains=(
                    'Exception when processing save queue (%s); error %d/%d' % (crash_id, i, MAX_ATTEMPTS)))
def test_extract_payload_2_dumps(self, request_generator):
    """extract_payload splits two dump files from the annotation fields."""
    data, headers = multipart_encode({
        'ProductName': 'Firefox',
        'Version': '1',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'deadbeef')),
        'upload_file_minidump_flash1': ('fakecrash2.dump', io.BytesIO(b'abcd1234')),
    })
    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=data,
    )

    bsp = BreakpadSubmitterResource(self.empty_config)
    # Plain fields land in the raw crash; files land in dumps
    expected_raw_crash = {
        'ProductName': 'Firefox',
        'Version': '1',
    }
    expected_dumps = {
        'upload_file_minidump': b'deadbeef',
        'upload_file_minidump_flash1': b'abcd1234'
    }
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_extract_payload_2_dumps(self, request_generator):
    """extract_payload records per-dump MD5 checksums alongside the dumps."""
    data, headers = multipart_encode({
        'ProductName': 'Test',
        'Version': '1',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'deadbeef')),
        'upload_file_minidump_flash1': ('fakecrash2.dump', io.BytesIO(b'abcd1234')),
    })
    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=data,
    )

    bsp = BreakpadSubmitterResource(self.empty_config)
    expected_raw_crash = {
        'ProductName': 'Test',
        'Version': '1',
        # One checksum per dump file, keyed by field name
        'dump_checksums': {
            'upload_file_minidump': '4f41243847da693a4f356c0486114bc6',
            'upload_file_minidump_flash1': 'e19d5cd5af0378da05f63f891c7467af',
        }
    }
    expected_dumps = {
        'upload_file_minidump': b'deadbeef',
        'upload_file_minidump_flash1': b'abcd1234'
    }
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_multipart_encode_with_files():
    """Verify multipart_encode output (headers and exact body) for a payload with a file."""
    raw_crash = {
        "ProjectName": "Test",
        "Version": "1.0",
        # The io.BytesIO is a file-like object, so this will end up as a file.
        "upload_file_minidump": ("fakecrash.dump", io.BytesIO(b"abcd1234")),
    }
    # Fixed boundary makes the encoded body deterministic for comparison
    body, headers = multipart_encode(raw_crash, boundary="socorrobound1234567")
    assert (headers["Content-Type"] == "multipart/form-data; boundary=socorrobound1234567")
    assert headers["Content-Length"] == "431"
    # Byte-for-byte comparison of the generated multipart body
    assert body == (
        b"--socorrobound1234567\r\n"
        b'Content-Disposition: form-data; name="ProjectName"\r\n'
        b"Content-Type: text/plain; charset=utf-8\r\n"
        b"\r\n"
        b"Test\r\n"
        b"--socorrobound1234567\r\n"
        b'Content-Disposition: form-data; name="Version"\r\n'
        b"Content-Type: text/plain; charset=utf-8\r\n"
        b"\r\n"
        b"1.0\r\n"
        b"--socorrobound1234567\r\n"
        b'Content-Disposition: form-data; name="upload_file_minidump"; filename="fakecrash.dump"\r\n'
        b"Content-Type: application/octet-stream\r\n"
        b"\r\n"
        b"abcd1234\r\n"
        b"--socorrobound1234567--\r\n")
def test_extract_payload_compressed(self, request_generator):
    """extract_payload handles a gzip-compressed request body."""
    data, headers = multipart_encode({
        'ProductName': 'Firefox',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })
    # Compress the body and set the matching Content-Encoding header
    data = compress(data)
    headers['Content-Encoding'] = 'gzip'

    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=data,
    )

    bsp = BreakpadSubmitterResource(self.empty_config)
    expected_raw_crash = {
        'ProductName': 'Firefox',
        'Version': '1.0',
    }
    expected_dumps = {
        'upload_file_minidump': b'abcd1234'
    }
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_extract_payload_compressed(self, request_generator):
    """extract_payload handles a gzipped body and records payload type."""
    data, headers = multipart_encode({
        "ProductName": "Firefox",
        "Version": "1.0",
        "upload_file_minidump": ("fakecrash.dump", io.BytesIO(b"abcd1234")),
    })
    # Compress the body and advertise it with Content-Encoding
    data = compress(data)
    headers["Content-Encoding"] = "gzip"

    req = request_generator(method="POST", path="/submit", headers=headers, body=data)

    bsp = BreakpadSubmitterResource(self.empty_config)
    expected_raw_crash = {
        "ProductName": "Firefox",
        "Version": "1.0",
        "payload": "multipart",
    }
    expected_dumps = {"upload_file_minidump": b"abcd1234"}
    assert bsp.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_crash_storage(self, client, s3mock, mock_generate_test_filepath):
    """A submitted crash is saved to S3 as dump_names, dump, then raw crash — in that order."""
    # .verify_write_to_bucket() writes to the bucket to verify Antenna can
    # write to it and the configuration is correct
    s3mock.add_step(
        method='PUT',
        url='http://fakes3:4569/fakebucket/test/testwrite.txt',
        body=b'test',
        resp=s3mock.fake_response(status_code=200)
    )
    # We want to verify these files are saved in this specific order.
    s3mock.add_step(
        method='PUT',
        url='http://fakes3:4569/fakebucket/v1/dump_names/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'["upload_file_minidump"]',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url='http://fakes3:4569/fakebucket/v1/dump/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'abcd1234',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url='http://fakes3:4569/fakebucket/v2/raw_crash/de1/20160918/de1bb258-cbbf-4589-a673-34f800160918',
        # Not going to compare the body here because it's just the raw crash
        resp=s3mock.fake_response(status_code=200)
    )
    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Fennec',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'CRASHSTORAGE_CLASS': 'antenna.ext.s3.crashstorage.S3CrashStorage',
        'CRASHSTORAGE_ENDPOINT_URL': 'http://fakes3:4569',
        'CRASHSTORAGE_ACCESS_KEY': 'fakekey',
        'CRASHSTORAGE_SECRET_ACCESS_KEY': 'fakesecretkey',
        'CRASHSTORAGE_BUCKET_NAME': 'fakebucket',
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()

    # Verify the collector returns a 200 status code and the crash id
    # we fed it.
    assert result.status_code == 200
    assert result.content == b'CrashID=bp-de1bb258-cbbf-4589-a673-34f800160918\n'

    # Assert we did the entire s3 conversation
    assert s3mock.remaining_conversation() == []
def test_region_and_bucket_with_periods(self, client, s3mock):
    """Crash storage works with a region-based endpoint and a bucket name containing periods."""
    # .verify_configuration() calls HEAD on the bucket to verify it exists
    # and the configuration is correct.
    ROOT = 'https://s3-us-west-1.amazonaws.com/'
    s3mock.add_step(
        method='HEAD',
        url=ROOT + 'fakebucket.with.periods',
        resp=s3mock.fake_response(status_code=200)
    )
    # We want to verify these files are saved in this specific order.
    s3mock.add_step(
        method='PUT',
        url=ROOT + 'fakebucket.with.periods/v1/dump_names/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'["upload_file_minidump"]',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url=ROOT + 'fakebucket.with.periods/v1/dump/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'abcd1234',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url=ROOT + 'fakebucket.with.periods/v2/raw_crash/de1/20160918/de1bb258-cbbf-4589-a673-34f800160918',
        # Not going to compare the body here because it's just the raw crash
        resp=s3mock.fake_response(status_code=200)
    )
    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'CRASHSTORAGE_CLASS': 'antenna.ext.s3.crashstorage.S3CrashStorage',
        'CRASHSTORAGE_REGION': 'us-west-1',
        'CRASHSTORAGE_ACCESS_KEY': 'fakekey',
        'CRASHSTORAGE_SECRET_ACCESS_KEY': 'fakesecretkey',
        'CRASHSTORAGE_BUCKET_NAME': 'fakebucket.with.periods',
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()

    # Verify the collector returns a 200 status code and the crash id
    # we fed it.
    assert result.status_code == 200
    assert result.content == b'CrashID=bp-de1bb258-cbbf-4589-a673-34f800160918\n'

    # Assert we did the entire s3 conversation
    assert s3mock.remaining_conversation() == []
def test_flow(self, client):
    """Verify posting a crash gets to crash storage in the right shape"""
    client.rebuild_app({
        'THROTTLE_RULES': 'antenna.throttler.accept_all'
    })

    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()
    assert result.status_code == 200

    bsr = client.get_resource_by_name('breakpad')

    # Now we've got the BreakpadSubmitterResource, so we can pull out the
    # crashstorage, verify there's only one crash in it and then verify the
    # contents of the crash.
    crashstorage = bsr.crashstorage

    # 1 raw crash and 1 dump
    assert len(crashstorage.saved_things) == 2

    # First thing is the dump
    assert (
        crashstorage.saved_things[0] == {
            'crash_id': 'de1bb258-cbbf-4589-a673-34f800160918',
            'type': 'upload_file_minidump',
            'data': b'abcd1234'
        }
    )

    # Second thing is the raw crash metadata
    assert (
        crashstorage.saved_things[1] == {
            'crash_id': 'de1bb258-cbbf-4589-a673-34f800160918',
            'type': 'raw_crash',
            'data': {
                'ProductName': 'Test',
                'Version': '1.0',
                'dump_checksums': {'upload_file_minidump': 'e19d5cd5af0378da05f63f891c7467af'},
                'legacy_processing': 0,
                'throttle_rate': 100,
                'submitted_timestamp': '2011-09-06T00:00:00+00:00',
                'timestamp': 1315267200.0,
                'type_tag': 'bp',
                'uuid': 'de1bb258-cbbf-4589-a673-34f800160918'
            }
        }
    )
def test_crash_storage(self, client, s3mock, mock_generate_test_filepath):
    """A submitted crash is saved to S3 as dump_names, dump, then raw crash — in that order."""
    # .verify_write_to_bucket() writes to the bucket to verify Antenna can
    # write to it and the configuration is correct
    s3mock.add_step(method='PUT',
                    url='http://fakes3:4569/fakebucket/test/testwrite.txt',
                    body=b'test',
                    resp=s3mock.fake_response(status_code=200))
    # We want to verify these files are saved in this specific order.
    s3mock.add_step(
        method='PUT',
        url=
        'http://fakes3:4569/fakebucket/v1/dump_names/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'["upload_file_minidump"]',
        resp=s3mock.fake_response(status_code=200))
    s3mock.add_step(
        method='PUT',
        url=
        'http://fakes3:4569/fakebucket/v1/dump/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'abcd1234',
        resp=s3mock.fake_response(status_code=200))
    s3mock.add_step(
        method='PUT',
        url=
        'http://fakes3:4569/fakebucket/v2/raw_crash/de1/20160918/de1bb258-cbbf-4589-a673-34f800160918',
        # Not going to compare the body here because it's just the raw crash
        resp=s3mock.fake_response(status_code=200))
    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Fennec',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'CRASHSTORAGE_CLASS': 'antenna.ext.s3.crashstorage.S3CrashStorage',
        'CRASHSTORAGE_ENDPOINT_URL': 'http://fakes3:4569',
        'CRASHSTORAGE_ACCESS_KEY': 'fakekey',
        'CRASHSTORAGE_SECRET_ACCESS_KEY': 'fakesecretkey',
        'CRASHSTORAGE_BUCKET_NAME': 'fakebucket',
    })

    result = client.simulate_post('/submit', headers=headers, body=data)
    client.join_app()

    # Verify the collector returns a 200 status code and the crash id
    # we fed it.
    assert result.status_code == 200
    assert result.content == b'CrashID=bp-de1bb258-cbbf-4589-a673-34f800160918\n'

    # Assert we did the entire s3 conversation
    assert s3mock.remaining_conversation() == []
def test_missing_content_type(self, posturl, s3conn, crash_generator):
    """A crash posted without any Content-Type header gets discarded."""
    raw_crash, dumps = crash_generator.generate()
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Post the payload with every header stripped, including Content-Type
    response = requests.post(posturl, headers={}, data=payload)

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_load_files(self, client, tmpdir):
    """Verify we can rebuild the crash from the fs"""
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    data, headers = multipart_encode({
        'uuid': crash_id,
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'BASEDIR': str(tmpdir),
        'THROTTLE_RULES': 'antenna.throttler.accept_all',
        'CRASHSTORAGE_CLASS': 'antenna.ext.fs.crashstorage.FSCrashStorage',
        'CRASHSTORAGE_FS_ROOT': str(tmpdir.join('antenna_crashes')),
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()
    assert result.status_code == 200

    # Read the crash back with a fresh FSCrashStorage pointed at the same root
    config = ConfigManager.from_dict({
        'FS_ROOT': str(tmpdir.join('antenna_crashes')),
    })
    fscrashstore = FSCrashStorage(config)

    raw_crash, dumps = fscrashstore.load_raw_crash(crash_id)

    # The reloaded raw crash includes SHA256 checksums for the dump
    assert (
        raw_crash == {
            'uuid': crash_id,
            'ProductName': 'Test',
            'MinidumpSha256Hash': 'e9cee71ab932fde863338d08be4de9dfe39ea049bdafb342ce659ec5450b69ae',
            'Version': '1.0',
            'dump_checksums': {
                'upload_file_minidump': 'e9cee71ab932fde863338d08be4de9dfe39ea049bdafb342ce659ec5450b69ae'
            },
            'legacy_processing': 0,
            'throttle_rate': 100,
            'submitted_timestamp': '2011-09-06T00:00:00+00:00',
            'timestamp': 1315267200.0,
            'type_tag': 'bp',
        }
    )
    assert dumps == {'upload_file_minidump': b'abcd1234'}
def test_wrong_boundary(self, posturl, s3conn, crash_generator):
    """A Content-Type whose boundary doesn't match the body leads to a discard."""
    raw_crash, dumps = crash_generator.generate()
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Clobber the real boundary with one that never appears in the payload
    headers['Content-Type'] = 'multipart/form-data; boundary=foo'

    response = requests.post(posturl, headers=headers, data=payload)

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_load_files(self, client, tmpdir):
    """Verify we can rebuild the crash from the fs"""
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    data, headers = multipart_encode({
        'uuid': crash_id,
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'BASEDIR': str(tmpdir),
        'THROTTLE_RULES': 'antenna.throttler.accept_all',
        'CRASHSTORAGE_CLASS': 'antenna.ext.fs.crashstorage.FSCrashStorage',
        'CRASHSTORAGE_FS_ROOT': str(tmpdir.join('antenna_crashes')),
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()
    assert result.status_code == 200

    # Read the crash back with a fresh FSCrashStorage pointed at the same root
    config = ConfigManager.from_dict({
        'FS_ROOT': str(tmpdir.join('antenna_crashes')),
    })
    fscrashstore = FSCrashStorage(config)

    raw_crash, dumps = fscrashstore.load_raw_crash(crash_id)

    assert (
        raw_crash == {
            'uuid': crash_id,
            'ProductName': 'Test',
            'Version': '1.0',
            'dump_checksums': {'upload_file_minidump': 'e19d5cd5af0378da05f63f891c7467af'},
            'legacy_processing': 0,
            'throttle_rate': 100,
            'submitted_timestamp': '2011-09-06T00:00:00+00:00',
            'timestamp': 1315267200.0,
            'type_tag': 'bp',
        }
    )
    assert dumps == {'upload_file_minidump': b'abcd1234'}
def test_extract_payload_with_nulls(self, data, expected_raw_crash, expected_dumps, request_generator):
    """Parametrized: payloads containing nulls extract to the expected values."""
    body, headers = multipart_encode(data)
    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=body,
    )

    resource = BreakpadSubmitterResource(self.empty_config)
    assert resource.extract_payload(req) == (expected_raw_crash, expected_dumps)
def test_no_payload(self, posturl, s3conn, crash_generator):
    """Posting an empty body results in the crash being discarded."""
    raw_crash, dumps = crash_generator.generate()
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # The body we actually send is empty, so declare a zero length
    headers['Content-Length'] = '0'

    response = requests.post(posturl, headers=headers, data='')

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_extract_payload_bad_json(self, request_generator):
    """Unparseable JSON in "extra" raises MalformedCrashReport matching bad_json."""
    # The raw newline is an invalid control character inside a JSON string,
    # so parsing the blob fails
    body, headers = multipart_encode({"extra": '{"ProductName":"Firefox\n"}'})
    req = request_generator(method="POST", path="/submit", headers=headers, body=body)

    resource = BreakpadSubmitterResource(self.empty_config)
    with pytest.raises(MalformedCrashReport, match="bad_json"):
        resource.extract_payload(req)
def test_content_length_non_int(self, posturl, crash_generator):
    """A non-integer Content-Length yields an HTTP 400."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Replace the computed length with something that isn't a number
    headers['Content-Length'] = 'foo'

    response = http_post(posturl, headers, payload)
    assert response.getcode() == 400
def test_wrong_boundary(self, posturl, s3conn, crash_generator):
    """Post a crash with a header with wrong boundary marker."""
    raw_crash, dumps = crash_generator.generate()
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Swap in a boundary that never occurs in the payload body
    headers["Content-Type"] = "multipart/form-data; boundary=foo"

    response = requests.post(posturl, headers=headers, data=payload)

    assert response.status_code == 400
    assert str(response.content, encoding="utf-8") == "Discarded=malformed_no_annotations"
def test_junk_payload(self, posturl, s3conn, crash_generator):
    """An unparseable (junk) body results in the crash being discarded."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Swap the real multipart body for unparseable junk
    response = requests.post(posturl, headers=headers, data='foobarbaz')

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_compressed_payload_bad_header(self, posturl, s3conn, crash_generator):
    """A gzipped body without a Content-Encoding header gets discarded."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # gzip the body but deliberately leave out the Content-Encoding header
    compressed = mini_poster.compress(payload)

    response = requests.post(posturl, headers=headers, data=compressed)

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_compressed_header_non_compressed_payload(self, posturl, s3conn, crash_generator):
    """A gzip Content-Encoding header over an uncompressed body gets discarded."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Claim gzip encoding without actually compressing the body
    headers['Content-Encoding'] = 'gzip'

    response = requests.post(posturl, headers=headers, data=payload)

    assert response.status_code == 200
    assert str(response.content, encoding='utf-8') == 'Discarded=1'
def test_submit_crash_report_reply(self, client):
    """Submitting a crash returns a 200 whose body starts with a CrashID."""
    fields = {
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
    }
    body, headers = multipart_encode(fields)

    result = client.simulate_post('/submit', headers=headers, body=body)

    assert result.status_code == 200
    assert result.content.startswith(b'CrashID=bp')
def test_no_content_length(self, posturl, crash_generator):
    """A crash posted without any Content-Length header is still accepted."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Strip the Content-Length header before posting
    del headers['Content-Length']

    response = http_post(posturl, headers, payload)

    assert response.getcode() == 200
    assert str(response.read(), encoding='utf-8').startswith('CrashID=')
def test_content_length_20(self, posturl, crash_generator):
    """A Content-Length of 20 (smaller than the body) results in a discard."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Declare far fewer bytes than the payload actually contains
    headers['Content-Length'] = '20'

    response = http_post(posturl, headers, payload)

    assert response.getcode() == 200
    assert str(response.read(), encoding='utf-8') == 'Discarded=1'
def test_junk_payload(self, posturl, s3conn, crash_generator):
    """An unparseable (junk) body is rejected with a 400 and a malformed reason."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Replace the multipart body with junk that can't be parsed
    response = requests.post(posturl, headers=headers, data="foobarbaz")

    assert response.status_code == 400
    assert str(response.content, encoding="utf-8") == "Discarded=malformed_no_annotations"
def test_no_payload(self, posturl, s3conn, crash_generator):
    """An empty body is rejected with a 400 and a malformed reason."""
    raw_crash, dumps = crash_generator.generate()
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Declare a zero-length body to match the empty payload we send
    headers["Content-Length"] = "0"

    response = requests.post(posturl, headers=headers, data="")

    assert response.status_code == 400
    assert str(response.content, encoding="utf-8") == "Discarded=malformed_no_content_length"
def _generate_sized_crash(self, size, crash_generator):
    """Generate a crash payload dict whose encoded size is exactly ``size`` bytes.

    The dump file is padded with 'a' characters so the total multipart
    payload comes out to ``size``.

    :raises ValueError: if ``size`` is smaller than the minimum payload size
        (previously this silently produced an oversized payload because
        ``'a' * negative`` is the empty string)
    """
    raw_crash, dumps = crash_generator.generate()

    # Encode once with an empty dump to measure the fixed overhead
    dumps['upload_file_minidump'] = ''
    crash_payload = mini_poster.assemble_crash_payload_dict(raw_crash, dumps)
    payload, headers = mini_poster.multipart_encode(crash_payload)
    base_size = len(payload)

    if size < base_size:
        raise ValueError(
            'requested size %d is smaller than minimum payload size %d' % (size, base_size)
        )

    # Create a "dump file" which is really just a bunch of 'a' such that
    # the entire payload is equal to size
    dumps['upload_file_minidump'] = 'a' * (size - base_size)

    return mini_poster.assemble_crash_payload_dict(raw_crash, dumps)
def test_content_length_20(self, posturl, crash_generator):
    """A Content-Length of 20 (smaller than the body) is rejected with a 400."""
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Declare far fewer bytes than the payload actually contains
    headers["Content-Length"] = "20"

    response = http_post(posturl, headers, payload)

    assert response.getcode() == 400
    assert str(response.read(), encoding="utf-8") == "Discarded=malformed_no_annotations"
def test_submit_crash_report_reply(self, client):
    """Submitting a crash returns 200, a text/plain body, and a CrashID."""
    fields = {
        "ProductName": "Firefox",
        "Version": "60.0a1",
        "ReleaseChannel": "nightly",
        "upload_file_minidump": ("fakecrash.dump", io.BytesIO(b"abcd1234")),
    }
    body, headers = multipart_encode(fields)

    result = client.simulate_post("/submit", headers=headers, body=body)

    assert result.status_code == 200
    assert result.headers["Content-Type"].startswith("text/plain")
    assert result.content.startswith(b"CrashID=bp")
def test_submit_crash_report_reply(self, client):
    """Submitting a crash returns 200, a text/plain body, and a CrashID."""
    fields = {
        'ProductName': 'Firefox',
        'Version': '60.0a1',
        'ReleaseChannel': 'nightly',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
    }
    body, headers = multipart_encode(fields)

    result = client.simulate_post('/submit', headers=headers, body=body)

    assert result.status_code == 200
    assert result.headers['Content-Type'].startswith('text/plain')
    assert result.content.startswith(b'CrashID=bp')
def test_flow(self, client):
    """Verify posting a crash gets to crash storage in the right shape"""
    client.rebuild_app({
        'THROTTLE_RULES': 'antenna.throttler.ACCEPT_ALL',
        'PRODUCTS': 'antenna.throttler.ALL_PRODUCTS'
    })

    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()
    assert result.status_code == 200

    bsr = client.get_resource_by_name('breakpad')

    # Now we've got the BreakpadSubmitterResource, so we can pull out the
    # crashstorage, verify there's only one crash in it and then verify the
    # contents of the crash.

    # Verify things got saved
    crashstorage = bsr.crashstorage
    assert (
        crashstorage.saved_things == [
            {'crash_id': 'de1bb258-cbbf-4589-a673-34f800160918'}
        ]
    )

    # Verify things got published
    crashpublish = bsr.crashpublish
    assert (
        crashpublish.published_things == [
            {'crash_id': 'de1bb258-cbbf-4589-a673-34f800160918'}
        ]
    )
def test_existing_uuid(self, client):
    """A uuid provided in the submission is reused as the crash id."""
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    fields = {
        'uuid': crash_id,
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
    }
    body, headers = multipart_encode(fields)

    result = client.simulate_post('/submit', headers=headers, body=body)
    assert result.status_code == 200

    # Extract the uuid from the response content and verify that it's the
    # crash id we sent
    assert result.content.decode('utf-8') == 'CrashID=bp-%s\n' % crash_id
def test_crash_publish(self, client, pubsub):
    """A submitted crash id is published to the configured Pub/Sub topic."""
    PROJECT = 'test_socorro'
    TOPIC = 'test_socorro_normal'
    SUB = 'test_subscription'

    pubsub.create_topic(PROJECT, TOPIC)
    subscription_path = pubsub.create_subscription(PROJECT, TOPIC, SUB)

    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Fennec',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration
    client.rebuild_app({
        'LOCAL_DEV_ENV': 'True',
        'CRASHPUBLISH_CLASS': 'antenna.ext.pubsub.crashpublish.PubSubCrashPublish',
        'CRASHPUBLISH_PROJECT_ID': PROJECT,
        'CRASHPUBLISH_TOPIC_NAME': TOPIC
    })

    # Slurp off the "test" crash id from verification
    pubsub.get_published_crashids(subscription_path)

    result = client.simulate_post(
        '/submit',
        headers=headers,
        body=data
    )
    client.join_app()

    # Verify the collector returns a 200 status code and the crash id
    # we fed it.
    assert result.status_code == 200
    assert result.content == b'CrashID=bp-de1bb258-cbbf-4589-a673-34f800160918\n'

    # Assert crash id was published
    crashids = pubsub.get_published_crashids(subscription_path)
    assert crashids == [
        b'de1bb258-cbbf-4589-a673-34f800160918'
    ]
def test_retry_publish(self, client, loggingmock):
    """A failing crash publisher makes the crashmover retry and log each attempt."""
    crash_id = 'de1bb258-cbbf-4589-a673-34f800160918'
    data, headers = multipart_encode({
        'uuid': crash_id,
        'ProductName': 'Firefox',
        'Version': '60.0a1',
        'ReleaseChannel': 'nightly',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Swap in a publish class that always raises
    client.rebuild_app({
        'CRASHPUBLISH_CLASS': BadCrashPublish.__module__ + '.' + BadCrashPublish.__name__,
    })

    with loggingmock(['antenna']) as lm:
        result = client.simulate_post(
            '/submit',
            headers=headers,
            body=data
        )
        assert result.status_code == 200

        # The publish is bad, so this should raise errors and then log something
        client.join_app()

        # We're using BadCrashPublish so the crashmover should retry 20
        # times logging a message each time and then give up
        for i in range(1, MAX_ATTEMPTS):
            assert lm.has_record(
                name='antenna.breakpad_resource',
                levelname='ERROR',
                msg_contains=(
                    'Exception when processing queue (%s), state: %s; error %d/%d' % (
                        crash_id, 'publish', i, MAX_ATTEMPTS
                    )
                )
            )
def test_queuing(self, client):
    """Crashes queue up in the crashmover and drain when the app is joined."""

    def check_health(crashmover_pool_size, crashmover_queue_size):
        # Helper: assert the crashmover pool/queue are at the expected sizes
        bpr = client.get_resource_by_name('breakpad')
        assert len(bpr.crashmover_queue) == crashmover_queue_size
        assert len(bpr.crashmover_pool) == crashmover_pool_size

    # Rebuild the app so the client only saves one crash at a time to s3
    client.rebuild_app({
        'CONCURRENT_SAVES': '1'
    })

    data, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Firefox',
        'Version': '60.0a1',
        'ReleaseChannel': 'nightly',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Verify initial conditions are correct--no active coroutines and
    # nothing in the queue
    check_health(crashmover_pool_size=0, crashmover_queue_size=0)

    # Submit a crash
    client.simulate_post('/submit', headers=headers, body=data)
    # Now there's one coroutine active and one item in the queue
    check_health(crashmover_pool_size=1, crashmover_queue_size=1)

    # Submit another crash
    client.simulate_post('/submit', headers=headers, body=data)
    # The coroutine hasn't run yet (we haven't called .join), so there's
    # one coroutine and two queued crashes to be saved
    check_health(crashmover_pool_size=2, crashmover_queue_size=2)

    # Now join the app and let the coroutines run and make sure the queue clears
    client.join_app()
    # No more coroutines and no more queue
    check_health(crashmover_pool_size=0, crashmover_queue_size=0)
def test_throttleable(self, client, loggingmock):
    """A crash submitted with Throttleable=0 matches the THROTTLEABLE_0 rule."""
    crash_fields = {
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234')),
        'Throttleable': '0',
    }
    body, headers = multipart_encode(crash_fields)

    with loggingmock(['antenna']) as logmock:
        resp = client.simulate_post('/submit', headers=headers, body=body)
        assert resp.status_code == 200

        # Verify it got matched as a THROTTLEABLE_0
        assert logmock.has_record(
            name='antenna.breakpad_resource',
            levelname='INFO',
            msg_contains='matched by THROTTLEABLE_0; returned ACCEPT'
        )
def test_extract_payload_kvpairs_and_json(self, request_generator, metricsmock):
    """Crash data with both a JSON blob and kv pairs is treated as malformed.

    NOTE(review): this was previously named
    ``text_extract_payload_kvpairs_and_json`` (``text_`` instead of
    ``test_``), so pytest never collected or ran it. Renamed so the test
    actually executes.
    """
    # If there's a JSON blob and also kv pairs, then that's a malformed
    # crash
    data, headers = multipart_encode({
        'extra': '{"ProductName":"Firefox","Version":"1.0"}',
        'BadKey': 'BadValue',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })
    req = request_generator(
        method='POST',
        path='/submit',
        headers=headers,
        body=data,
    )

    bsp = BreakpadSubmitterResource(self.empty_config)
    with metricsmock as metrics:
        # Malformed payloads yield an empty raw crash and no dumps
        result = bsp.extract_payload(req)
        assert result == ({}, {})
        # And the malformed reason is emitted as a metric
        assert metrics.has_record(
            stat='malformed',
            tags=['reason:has_json_and_kv']
        )
def test_content_length_1000(self, posturl, crash_generator):
    """Post a crash with a content-length greater than size of payload.

    The server waits for payload bytes that never arrive, so the request
    should time out with an HTTP 504 (or a dropped connection, which we
    normalize to 504).
    """
    raw_crash, dumps = crash_generator.generate()

    # Generate the payload and headers for a crash with no dumps
    payload, headers = mini_poster.multipart_encode(raw_crash)

    # Add wrong content-length
    headers['Content-Length'] = '1000'

    try:
        resp = http_post(posturl, headers, payload)
        status_code = resp.getcode()
    # FIX: the exception was previously bound (``as exc``) but never used
    except RemoteDisconnected:
        # If there's an ELB and nginx times out waiting for the rest of the
        # request, then we get an HTTP 504. If there's no ELB (we're
        # connecting directly to nginx), then nginx just drops the
        # connection and we get back a RemoteDisconnected error.
        status_code = 504

    # Verify we get an HTTP 504 because something timed out waiting for the
    # HTTP client (us) to send the rest of the data which is expected
    # because we sent a bad content-length
    assert status_code == 504
def test_storage_files(self, client, tmpdir):
    """Verify posting a crash gets to crash storage in the right shape"""
    body, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Test',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'BASEDIR': str(tmpdir),
        'THROTTLE_RULES': 'antenna.throttler.accept_all',
        'CRASHSTORAGE_CLASS': 'antenna.ext.fs.crashstorage.FSCrashStorage',
        'CRASHSTORAGE_FS_ROOT': str(tmpdir.join('antenna_crashes')),
    })

    resp = client.simulate_post('/submit', headers=headers, body=body)
    client.join_app()
    assert resp.status_code == 200

    files = get_tree(str(tmpdir))
    prefix_len = len(str(tmpdir))

    def strip_tmpdir(path):
        """Removes the tmpdir portion from the beginning of the path"""
        return path[prefix_len:]

    # Verify the set of files in the directory match what FSCrashStorage
    # should have written--no more and no less.
    expected_paths = [
        '/antenna_crashes/20160918/raw_crash/de1bb258-cbbf-4589-a673-34f800160918.json',
        '/antenna_crashes/20160918/dump_names/de1bb258-cbbf-4589-a673-34f800160918.json',
        '/antenna_crashes/20160918/upload_file_minidump/de1bb258-cbbf-4589-a673-34f800160918',
    ]
    assert sorted(strip_tmpdir(path) for path in files) == sorted(expected_paths)

    # Read every stored file so we can check its contents
    contents = {}
    for path in files:
        with open(path, 'rb') as fp:
            contents[strip_tmpdir(path)] = fp.read()

    expected_raw_crash = (
        b'{"ProductName": "Test", '
        b'"Version": "1.0", '
        b'"dump_checksums": {"upload_file_minidump": "e19d5cd5af0378da05f63f891c7467af"}, '
        b'"legacy_processing": 0, '
        b'"submitted_timestamp": "2011-09-06T00:00:00+00:00", '
        b'"throttle_rate": 100, '
        b'"timestamp": 1315267200.0, '
        b'"type_tag": "bp", '
        b'"uuid": "de1bb258-cbbf-4589-a673-34f800160918"}'
    )
    assert (
        contents['/antenna_crashes/20160918/raw_crash/de1bb258-cbbf-4589-a673-34f800160918.json'] ==
        expected_raw_crash
    )
    assert (
        contents['/antenna_crashes/20160918/dump_names/de1bb258-cbbf-4589-a673-34f800160918.json'] ==
        b'["upload_file_minidump"]'
    )
    assert (
        contents['/antenna_crashes/20160918/upload_file_minidump/de1bb258-cbbf-4589-a673-34f800160918'] ==
        b'abcd1234'
    )
def test_retrying(self, client, s3mock, loggingmock, mock_generate_test_filepath):
    """A 403 on an S3 PUT gets retried, logged, and then the save succeeds."""
    base = 'http://fakes3:4569/'

    # .verify_write_to_bucket() writes to the bucket to verify Antenna can
    # write to it and the configuration is correct
    s3mock.add_step(
        method='PUT',
        url=base + 'fakebucket/test/testwrite.txt',
        body=b'test',
        resp=s3mock.fake_response(status_code=200)
    )
    # Fail once with a 403, retry and then proceed.
    s3mock.add_step(
        method='PUT',
        url=base + 'fakebucket/v1/dump_names/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'["upload_file_minidump"]',
        resp=s3mock.fake_response(status_code=403)
    )
    # Proceed with saving files.
    s3mock.add_step(
        method='PUT',
        url=base + 'fakebucket/v1/dump_names/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'["upload_file_minidump"]',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url=base + 'fakebucket/v1/dump/de1bb258-cbbf-4589-a673-34f800160918',
        body=b'abcd1234',
        resp=s3mock.fake_response(status_code=200)
    )
    s3mock.add_step(
        method='PUT',
        url=base + 'fakebucket/v2/raw_crash/de1/20160918/de1bb258-cbbf-4589-a673-34f800160918',
        # Not going to compare the body here because it's just the raw crash
        resp=s3mock.fake_response(status_code=200)
    )

    body, headers = multipart_encode({
        'uuid': 'de1bb258-cbbf-4589-a673-34f800160918',
        'ProductName': 'Fennec',
        'Version': '1.0',
        'upload_file_minidump': ('fakecrash.dump', io.BytesIO(b'abcd1234'))
    })

    # Rebuild the app the test client is using with relevant configuration.
    client.rebuild_app({
        'CRASHSTORAGE_CLASS': 'antenna.ext.s3.crashstorage.S3CrashStorage',
        'CRASHSTORAGE_ENDPOINT_URL': 'http://fakes3:4569',
        'CRASHSTORAGE_ACCESS_KEY': 'fakekey',
        'CRASHSTORAGE_SECRET_ACCESS_KEY': 'fakesecretkey',
        'CRASHSTORAGE_BUCKET_NAME': 'fakebucket',
    })

    with loggingmock(['antenna']) as logmock:
        resp = client.simulate_post('/submit', headers=headers, body=body)
        client.join_app()

        # Verify the collector returns a 200 status code and the crash id
        # we fed it.
        assert resp.status_code == 200
        assert resp.content == b'CrashID=bp-de1bb258-cbbf-4589-a673-34f800160918\n'

        # Verify the retry decorator logged something
        assert logmock.has_record(
            name='antenna.ext.s3.connection',
            levelname='WARNING',
            msg_contains='retry attempt 0'
        )

        # Assert we did the entire s3 conversation
        assert s3mock.remaining_conversation() == []