def get_model_cache(config):
    """Build and return the data model cache selected by *config*.

    Reads the `DATA_MODEL_CACHE_CONFIG` dict from the app config and
    dispatches on its `engine` key (default: "noop").

    Raises:
        Exception: if a memcached engine is configured without an
            `endpoint`, or if the engine name is unrecognized.
    """
    conf = config.get("DATA_MODEL_CACHE_CONFIG", {})
    engine = conf.get("engine", "noop")

    if engine == "noop":
        return NoopDataModelCache()

    if engine == "inmemory":
        return InMemoryDataModelCache()

    if engine == "memcached":
        endpoint = conf.get("endpoint", None)
        if endpoint is None:
            raise Exception("Missing `endpoint` for memcached model cache configuration")

        cache = MemcachedModelCache(
            endpoint,
            timeout=conf.get("timeout"),
            connect_timeout=conf.get("connect_timeout"),
        )
        # Optionally wrap so the DB connection is dropped before cache calls.
        if conf.get("predisconnect_from_db"):
            cache = DisconnectWrapper(cache, config)
        return cache

    raise Exception("Unknown model cache engine `%s`" % engine)
def test_auth_with_user_creds_set_session_token(self):
    """Verify the proxy attaches the user's bearer token to its session."""
    cache = InMemoryDataModelCache(app.config.get("DATA_MODEL_CACHE_CONFIG", {}))
    with mock.patch("proxy.model_cache", cache), HTTMock(docker_registry_mock):
        proxy = Proxy(self.auth_config, "library/postgres")
        self.assertEqual(
            proxy._session.headers.get("Authorization"),
            f"Bearer {USER_TOKEN}",
        )
def test_get_cached_repo_blob(registry_model):
    """Blob lookups should be served from the model cache once populated,
    and miss (hitting the broken DB) for digests the cache has never seen."""
    cache = InMemoryDataModelCache()

    repo_ref = registry_model.lookup_repository("devtable", "simple")
    tag = registry_model.get_repo_tag(repo_ref, "latest")
    manifest = registry_model.get_manifest_for_tag(tag)

    local_blobs = registry_model.get_manifest_local_blobs(manifest, include_placements=True)
    assert local_blobs
    expected = local_blobs[0]

    def _check(candidate):
        # Field-by-field comparison against the blob loaded from the DB.
        assert candidate.digest == expected.digest
        assert candidate.uuid == expected.uuid
        assert candidate.compressed_size == expected.compressed_size
        assert candidate.uncompressed_size == expected.uncompressed_size
        assert candidate.uploading == expected.uploading
        assert candidate.placements == expected.placements

    # First lookup populates the cache from the database.
    _check(
        registry_model.get_cached_repo_blob(cache, "devtable", "simple", expected.digest)
    )

    # Simulate a dead DB connection so only the cache can answer.
    def fail(x, y):
        raise SomeException("Not connected!")

    with patch(
        "data.registry_model.registry_pre_oci_model.model.blob.get_repository_blob_by_digest", fail):
        with patch(
            "data.registry_model.registry_oci_model.model.oci.blob.get_repository_blob_by_digest",
            fail,
        ):
            # The second lookup must be answered entirely from the cache.
            _check(
                registry_model.get_cached_repo_blob(
                    cache, "devtable", "simple", expected.digest)
            )

            # An uncached digest falls through to the (broken) DB and fails.
            with pytest.raises(SomeException):
                registry_model.get_cached_repo_blob(
                    cache, "devtable", "simple", "some other digest")
def get_model_cache(config):
    """Instantiate the data model cache described by *config*.

    Dispatches on the `engine` key of `DATA_MODEL_CACHE_CONFIG`
    (default: "noop"). Supported engines: noop, inmemory, memcached,
    redis.

    Raises:
        Exception: when a required connection setting is missing
            (`endpoint` for memcached, `host` for redis) or when the
            engine name is unknown.
    """
    cache_config = config.get("DATA_MODEL_CACHE_CONFIG", {})
    engine = cache_config.get("engine", "noop")

    if engine == "noop":
        return NoopDataModelCache(cache_config)
    elif engine == "inmemory":
        return InMemoryDataModelCache(cache_config)
    elif engine == "memcached":
        endpoint = cache_config.get("endpoint", None)
        if endpoint is None:
            raise Exception(
                "Missing `endpoint` for memcached model cache configuration")

        cache = MemcachedModelCache(
            cache_config,
            endpoint,
            timeout=cache_config.get("timeout"),
            connect_timeout=cache_config.get("connect_timeout"),
        )
        # Optionally wrap so the DB connection is dropped before cache calls.
        if cache_config.get("predisconnect_from_db"):
            cache = DisconnectWrapper(cache, config)
        return cache
    elif engine == "redis":
        host = cache_config.get("host", None)
        if host is None:
            raise Exception(
                "Missing `host` for Redis model cache configuration")

        redis_kwargs = {
            "host": host,
            "port": cache_config.get("port", 6379),
            "password": cache_config.get("password", None),
            "db": cache_config.get("db", 0),
            "ca_cert": cache_config.get("ca_cert", None),
            "ssl": cache_config.get("ssl", False),
        }
        return RedisDataModelCache(cache_config, **redis_kwargs)

    raise Exception("Unknown model cache engine `%s`" % engine)
def test_lookup_active_repository_tags(test_cached, oci_model):
    """Paginate through a large tag set (cached and uncached paths) and
    confirm every created tag is listed exactly once."""
    repo_ref = oci_model.lookup_repository("devtable", "simple")
    manifest = oci_model.get_manifest_for_tag(oci_model.get_repo_tag(repo_ref, "latest"))

    total = 500

    # Create the tags we expect to page through.
    expected = set()
    for i in range(0, total):
        name = "somenewtag%s" % i
        expected.add(name)
        oci_model.retarget_tag(repo_ref, name, manifest, storage, docker_v2_signing_key)

    assert expected

    # Page through with a window of 11 (10 results + 1 lookahead).
    found = set()
    start_id = None
    while True:
        if test_cached:
            # Fresh cache per page, mirroring a cold-cache lookup each time.
            page_cache = InMemoryDataModelCache()
            page = oci_model.lookup_cached_active_repository_tags(
                page_cache, repo_ref, start_id, 11
            )
        else:
            page = oci_model.lookup_active_repository_tags(repo_ref, start_id, 11)

        assert len(page) <= 11
        for tag in page[0:10]:
            assert tag.name not in found
            if tag.name in expected:
                found.add(tag.name)
                expected.remove(tag.name)

        if len(page) < 11:
            break

        start_id = page[10].id

    # Every expected tag must have been seen exactly once.
    assert found
    assert not expected
def get_model_cache(config):
    """Return the data model cache matching the given configuration.

    Consults the 'DATA_MODEL_CACHE_CONFIG' dict and dispatches on its
    'engine' key (default: 'noop'). Supported engines: noop, inmemory,
    memcached.

    Raises:
        Exception: when memcached is selected without an 'endpoint',
            or when the engine name is unknown.
    """
    conf = config.get('DATA_MODEL_CACHE_CONFIG', {})
    engine = conf.get('engine', 'noop')

    if engine == 'noop':
        return NoopDataModelCache()

    if engine == 'inmemory':
        return InMemoryDataModelCache()

    if engine == 'memcached':
        endpoint = conf.get('endpoint', None)
        if endpoint is None:
            raise Exception(
                'Missing `endpoint` for memcached model cache configuration')

        return MemcachedModelCache(
            endpoint,
            timeout=conf.get('timeout'),
            connect_timeout=conf.get('connect_timeout'),
        )

    raise Exception('Unknown model cache engine `%s`' % engine)