class TestRetryAfterHeaders(unittest.TestCase, DSSAssertMixin):
    """Presence or absence of Retry-After headers is defined by dss.error.include_retry_after_header."""

    def test_502_get_bundle_HAS_retry_after_response(self):
        """Mock seems resistant to multiple calls, therefore this is only used for one endpoint."""
        with mock.patch('dss.api.bundles.get',
                        side_effect=DSSException(502, 'bad_gateway', "Bad Gateway")):
            self.app = ThreadedLocalServer()
            self.app.start()
            # Register shutdown as a cleanup instead of calling it at the end of
            # the test: the original trailing shutdown() call was skipped (and
            # the server leaked) whenever an assertion above it failed.
            self.addCleanup(self.app.shutdown)
            uuid = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
            version = datetime_to_version_format(datetime.datetime.utcnow())
            url = str(UrlBuilder().set(path=f"/v1/bundles/{uuid}").add_query(
                "version", version).add_query("replica", 'aws'))
            r = self.assertGetResponse(url, 502, headers=get_auth_header())
            self.assertEqual(int(r.response.headers['Retry-After']), 10)

    def test_500_server_error(self):
        """Test that the dss_handler includes retry-after headers."""
        with app.test_request_context('/test'):
            r = mock_500_server_error()
            self.assertEqual(int(r.headers['Retry-After']), 10)

    def test_501_not_implemented(self):
        """501 should not be retried."""
        with app.test_request_context('/test'):
            r = mock_501_not_implemented()
            self.assertEqual(r.headers.get('Retry-After'), None)

    def test_502_bad_gateway(self):
        """Test that the dss_handler includes retry-after headers."""
        with app.test_request_context('/test'):
            r = mock_502_bad_gateway()
            self.assertEqual(int(r.headers['Retry-After']), 10)

    def test_503_service_unavailable(self):
        """Test that the dss_handler includes retry-after headers."""
        with app.test_request_context('/test'):
            r = mock_503_service_unavailable()
            self.assertEqual(int(r.headers['Retry-After']), 10)

    def test_504_504_gateway_timeout(self):
        """Test that the dss_handler includes retry-after headers."""
        with app.test_request_context('/test'):
            r = mock_504_gateway_timeout()
            self.assertEqual(int(r.headers['Retry-After']), 10)
def test_502_get_bundle_HAS_retry_after_response(self):
    """Mock seems resistant to multiple calls, therefore this is only used for one endpoint."""
    with mock.patch('dss.api.bundles.get',
                    side_effect=DSSException(502, 'bad_gateway', "Bad Gateway")):
        self.app = ThreadedLocalServer()
        self.app.start()
        # Register shutdown as a cleanup instead of calling it at the end of the
        # test: the original trailing shutdown() call was skipped (and the
        # server leaked) whenever an assertion above it failed.
        self.addCleanup(self.app.shutdown)
        uuid = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
        version = datetime_to_version_format(datetime.datetime.utcnow())
        url = str(UrlBuilder().set(path=f"/v1/bundles/{uuid}").add_query(
            "version", version).add_query("replica", 'aws'))
        r = self.assertGetResponse(url, 502, headers=get_auth_header())
        self.assertEqual(int(r.response.headers['Retry-After']), 10)
def setUpClass(cls):
    """Start the shared local test server and define invalid checksum headers."""
    cls.app = ThreadedLocalServer()
    cls.app.start()
    # One deliberately malformed value for each checksum header the API accepts.
    header_names = ('hca-dss-crc32c', 'hca-dss-s3_etag', 'hca-dss-sha1', 'hca-dss-sha256')
    bad_values = ('!!!!', '@@@@', '####', '$$$$')
    cls.bad_checksums = dict(zip(header_names, bad_values))
def setUpClass(cls):
    """Start the test server, select the TEST bucket config, and load the sample index document."""
    super().setUpClass()
    server = ThreadedLocalServer()
    server.start()
    cls.app = server
    dss.Config.set_config(dss.BucketConfig.TEST)
    sample_path = os.path.join(os.path.dirname(__file__), "sample_vx_index_doc.json")
    cls.index_document = BundleDocument(cls.replica, get_bundle_fqid())
    with open(sample_path, "r") as f:
        cls.index_document.update(json.load(f))
def setUpClass(cls):
    """Start the test server, select the TEST bucket config, and read the v3 sample index document."""
    super().setUpClass()
    server = ThreadedLocalServer()
    server.start()
    cls.app = server
    dss.Config.set_config(dss.BucketConfig.TEST)
    sample_path = os.path.join(os.path.dirname(__file__), "sample_v3_index_doc.json")
    with open(sample_path, "r") as f:
        cls.index_document = json.load(f)
def test_build_bundle_metadata_document(self):
    """Build metadata documents for a live bundle and for its tombstone, per replica."""
    dss.Config.set_config(dss.BucketConfig.TEST)
    server = ThreadedLocalServer()
    server.start()
    self.addCleanup(server.shutdown)
    for replica in Replica:
        uuid, version = _upload_bundle(server, replica)
        key = f"bundles/{uuid}.{version}"
        with self.subTest("Build normal bundle metadata document", replica=replica):
            doc = events.build_bundle_metadata_document(replica, key)
            for field in ("manifest", "files"):
                self.assertIn(field, doc)
            for field in ("version", "files"):
                self.assertIn(field, doc['manifest'])
            self.assertEqual(doc['event_type'], "CREATE")
            self.assertEqual(doc['bundle_info']['uuid'], uuid)
            self.assertEqual(doc['bundle_info']['version'], version)
        _tombstone_bundle(server, replica, uuid, version)
        with self.subTest("Build tombstoned bundle metadata document", replica=replica):
            doc = events.build_bundle_metadata_document(replica, f"{key}.{TOMBSTONE_SUFFIX}")
            self.assertNotIn("manifest", doc)
            self.assertEqual(doc['event_type'], "TOMBSTONE")
            self.assertEqual(doc['bundle_info']['uuid'], uuid)
            self.assertEqual(doc['bundle_info']['version'], version)
def setUpClass(cls):
    """Start the test server and upload three event-recorded bundles per replica."""
    cls.app = ThreadedLocalServer()
    cls.app.start()
    cls.bundles = {replica.name: list() for replica in Replica}
    with override_bucket_config(BucketConfig.TEST):
        for replica in Replica:
            # Point both flashflood prefixes (read and write) at one fresh, unique prefix.
            prefix = f"flashflood-{replica.name}-{uuid4()}"
            for direction in ("READ", "WRITE"):
                os.environ[f'DSS_{replica.name.upper()}_FLASHFLOOD_PREFIX_{direction}'] = prefix
            for _ in range(3):
                uuid, version = _upload_bundle(cls.app, replica)
                cls.bundles[replica.name].append((uuid, version))
                events.record_event_for_bundle(replica,
                                               f"bundles/{uuid}.{version}",
                                               use_version_for_timestamp=True)
def test_retry(self):
    """A session configured with our Retry policy makes the expected number of attempts."""
    with ThreadedLocalServer(TestHandler):
        expected_attempts = 3
        attempts = []

        class CountingRetry(Retry):
            # Record every increment so the total attempt count can be asserted.
            def increment(self, *args, **kwargs):
                attempts.append(1)
                return super().increment(*args, **kwargs)

        retry_policy = CountingRetry(total=expected_attempts - 1,
                                     status_forcelist=[500],
                                     allowed_methods=["GET"])
        with http_session(retry=retry_policy) as http:
            try:
                http.get("http://localhost:8000")
            except requests.exceptions.RetryError:
                # Expected once the retry budget is exhausted against a 500-ing server.
                pass
        self.assertEqual(expected_attempts, len(attempts))
def setUpClass(cls):
    """Start the test server, upload one file per replica, and build shared collection fixtures."""
    cls.app = ThreadedLocalServer()
    cls.app.start()
    # One uploaded file per replica, plus "file" and pointer ("foo") items referencing it.
    cls.s3_file_uuid, cls.s3_file_version = cls.upload_file(cls.app, {"foo": 1}, replica='aws')
    cls.s3_col_file_item = dict(type="file", uuid=cls.s3_file_uuid, version=cls.s3_file_version)
    cls.s3_col_ptr_item = dict(type="foo", uuid=cls.s3_file_uuid, version=cls.s3_file_version,
                               fragment="/foo")
    cls.gs_file_uuid, cls.gs_file_version = cls.upload_file(cls.app, {"foo": 1}, replica='gcp')
    cls.gs_col_file_item = dict(type="file", uuid=cls.gs_file_uuid, version=cls.gs_file_version)
    cls.gs_col_ptr_item = dict(type="foo", uuid=cls.gs_file_uuid, version=cls.gs_file_version,
                               fragment="/foo")
    cls.contents = [cls.s3_col_file_item] * 8 + [cls.s3_col_ptr_item] * 8
    # NOTE(review): _put is invoked unbound with cls standing in for self — preserved as-is.
    cls.uuid, cls.version = cls._put(cls, cls.contents)
    cls.invalid_ptr = dict(type="foo", uuid=cls.s3_file_uuid, version=cls.s3_file_version,
                           fragment="/xyz")
    cls.paging_test_replicas = ('aws', 'gcp')
    with open(os.environ['GOOGLE_APPLICATION_CREDENTIALS'], 'r') as f:
        cls.owner_email = json.load(f)['client_email']
def setUpClass(cls):
    """Spin up the shared local test server once for the whole class."""
    server = ThreadedLocalServer()
    server.start()
    cls.app = server
def setUpClass(cls):
    """Record the service-account owner email, start the server with a custom handler, and create an S3 client."""
    credentials_path = get_env('GOOGLE_APPLICATION_CREDENTIALS')
    with open(credentials_path, "r") as f:
        cls.owner = json.load(f)['client_email']
    cls.app = ThreadedLocalServer(handler_cls=MyHandlerClass)
    cls.app.start()
    cls.s3 = boto3.client('s3')
def setUpClass(cls):
    """Start the shared local test server and a bare Flask app for request contexts."""
    server = ThreadedLocalServer()
    server.start()
    cls.app = server
    cls.flask_app = flask.Flask(__name__)
def setUpClass(cls):
    """Start the shared local test server with a shortened override expiry."""
    server = ThreadedLocalServer()
    server.start()
    cls.app = server
    # Presumably shortens an expiry window so timeout behavior is testable
    # quickly — confirm against the chalice app's handling of this attribute.
    cls.app._chalice_app._override_exptime_seconds = 15.0