def workflow_default(c: Composition) -> None:
    """Run the Kafka matrix tests once per supported Confluent Platform version.

    For each version, the Confluent services are overridden to that version's
    image tag, the tests are run, and all containers/volumes are torn down so
    the next version starts from a clean slate.
    """
    c.start_and_wait_for_tcp(services=["localstack"])

    for version in CONFLUENT_PLATFORM_VERSIONS:
        print(f"==> Testing Confluent Platform {version}")
        versioned_services = [
            Zookeeper(tag=version),
            Kafka(tag=version),
            SchemaRegistry(tag=version),
        ]
        with c.override(*versioned_services):
            c.start_and_wait_for_tcp(
                services=["zookeeper", "kafka", "schema-registry", "materialized"]
            )
            c.wait_for_materialized()
            c.run("testdrive", "kafka-matrix.td", "testdrive/kafka-*.td")
            # Tear everything down so the next iteration starts fresh.
            c.kill("zookeeper", "kafka", "schema-registry", "materialized")
            c.rm(
                "zookeeper",
                "kafka",
                "schema-registry",
                "materialized",
                "testdrive",
                destroy_volumes=True,
            )
            c.rm_volumes("mzdata", "pgdata", force=True)
class RowsJoinOuter(Generator):
    """Generator producing a testdrive script that left-joins a large
    materialized view with itself."""

    # Number of rows in the generated view.
    COUNT = 1_000_000

    @classmethod
    def body(cls) -> None:
        # Emits testdrive commands on stdout; the workflow below redirects
        # stdout into a temporary .td file.
        print(
            f"> CREATE MATERIALIZED VIEW v1 AS SELECT generate_series AS f1, generate_series AS f2 FROM (SELECT * FROM generate_series(1, {cls.COUNT}));"
        )
        print("> SELECT COUNT(*) FROM v1 AS a1 LEFT JOIN v1 AS a2 USING (f1);")
        print(f"{cls.COUNT}")


SERVICES = [
    Zookeeper(),
    Kafka(),
    SchemaRegistry(),
    Materialized(
        memory="8G",
        options="--persistent-user-tables --persistent-kafka-sources",
    ),
    Testdrive(),
]


def workflow_default(c: Composition) -> None:
    """Start the stack, then write every Generator subclass's testdrive
    output into a temporary file under the composition path."""
    c.start_and_wait_for_tcp(
        services=["zookeeper", "kafka", "schema-registry", "materialized"]
    )
    with tempfile.NamedTemporaryFile(mode="w", dir=c.path) as tmp:
        with contextlib.redirect_stdout(tmp):
            # Plain loop, not a comprehension: generate() is called for its
            # side effects (printing), and the result list was discarded.
            for generator in Generator.__subclasses__():
                generator.generate()
from materialize.mzcompose import Composition, WorkflowArgumentParser
from materialize.mzcompose.services import (
    Kafka,
    Materialized,
    SchemaRegistry,
    Testdrive,
    Toxiproxy,
    Zookeeper,
)

# Pick a non-default port to make sure nothing is accidentally going around the proxy
KAFKA_SINK_PORT = 9091

SERVICES = [
    Zookeeper(),
    Kafka(port=KAFKA_SINK_PORT),
    SchemaRegistry(kafka_servers=[("kafka", str(KAFKA_SINK_PORT))]),
    Materialized(),
    Toxiproxy(),
    # Testdrive talks to Kafka through toxiproxy rather than directly.
    Testdrive(kafka_url="toxiproxy:9093"),
]


#
# Test the kafka sink resumption logic
#
def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    c.start_and_wait_for_tcp(
        services=["zookeeper", "kafka", "schema-registry", "materialized", "toxiproxy"]
    )
    c.wait_for_materialized()
# by the Apache License, Version 2.0. import time from materialize.mzcompose import Composition from materialize.mzcompose.services import ( Kafka, Materialized, SchemaRegistry, Testdrive, Zookeeper, ) SERVICES = [ Zookeeper(), Kafka(name="kafka1", broker_id=1, offsets_topic_replication_factor=2), Kafka(name="kafka2", broker_id=2, offsets_topic_replication_factor=2), Kafka(name="kafka3", broker_id=3, offsets_topic_replication_factor=2), SchemaRegistry( kafka_servers=[("kafka1", "9092"), ("kafka2", "9092"), ("kafka3", "9092")] ), Materialized(), Testdrive( entrypoint=[ "testdrive", "--schema-registry-url=http://schema-registry:8081", "--materialized-url=postgres://materialize@materialized:6875", "--kafka-option=acks=all", "--seed=1", ] ),
], volumes=["./sasl.jaas.config:/etc/zookeeper/sasl.jaas.config"], ), Kafka( environment=[ # "KAFKA_INTER_BROKER_LISTENER_NAME=SSL", "KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181", "KAFKA_SASL_ENABLED_MECHANISMS=PLAIN", "KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL=PLAIN", "KAFKA_SSL_KEYSTORE_FILENAME=kafka.keystore.jks", "KAFKA_SSL_KEYSTORE_CREDENTIALS=cert_creds", "KAFKA_SSL_KEY_CREDENTIALS=cert_creds", "KAFKA_SSL_TRUSTSTORE_FILENAME=kafka.truststore.jks", "KAFKA_SSL_TRUSTSTORE_CREDENTIALS=cert_creds", "KAFKA_SSL_CLIENT_AUTH=required", "KAFKA_SECURITY_INTER_BROKER_PROTOCOL=SASL_SSL", "KAFKA_OPTS=-Djava.security.auth.login.config=/etc/kafka/sasl.jaas.config", # Standard options we don't want to overwrite! "KAFKA_MIN_INSYNC_REPLICAS=1", "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1", "KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1", ], listener_type="SASL_SSL", volumes=[ "secrets:/etc/kafka/secrets", "./sasl.jaas.config:/etc/kafka/sasl.jaas.config", ], ), SchemaRegistry( environment=[ "SCHEMA_REGISTRY_KAFKASTORE_TIMEOUT_MS=10000",
from materialize.mzcompose import Composition
from materialize.mzcompose.services import (
    Kafka,
    Materialized,
    Postgres,
    SchemaRegistry,
    Testdrive,
    Zookeeper,
)

# A second Testdrive instance configured with no_reset=True.
testdrive_no_reset = Testdrive(name="testdrive_no_reset", no_reset=True)

SERVICES = [
    Zookeeper(),
    Kafka(auto_create_topics=True),
    SchemaRegistry(),
    Materialized(),
    Testdrive(),
    testdrive_no_reset,
    Postgres(),
]


def workflow_github_8021(c: Composition) -> None:
    """Regression workflow for GitHub issue 8021."""
    c.up("materialized")
    c.wait_for_materialized("materialized")
    c.run("testdrive", "github-8021.td")

    # Ensure MZ can boot
    c.kill("materialized")
SERVICES = [ TestCerts(), Zookeeper(), Kafka( depends_on=["zookeeper", "test-certs"], environment=[ # Default "KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181", "KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false", "KAFKA_MIN_INSYNC_REPLICAS=1", "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1", "KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1", "KAFKA_MESSAGE_MAX_BYTES=15728640", "KAFKA_REPLICA_FETCH_MAX_BYTES=15728640", # For this test "KAFKA_SSL_KEYSTORE_FILENAME=kafka.keystore.jks", "KAFKA_SSL_KEYSTORE_CREDENTIALS=cert_creds", "KAFKA_SSL_KEY_CREDENTIALS=cert_creds", "KAFKA_SSL_TRUSTSTORE_FILENAME=kafka.truststore.jks", "KAFKA_SSL_TRUSTSTORE_CREDENTIALS=cert_creds", "KAFKA_SSL_CLIENT_AUTH=required", "KAFKA_SECURITY_INTER_BROKER_PROTOCOL=SSL", ], listener_type="SSL", volumes=["secrets:/etc/kafka/secrets"], ), SchemaRegistry( depends_on=["kafka", "zookeeper", "test-certs"], environment=[ "SCHEMA_REGISTRY_KAFKASTORE_TIMEOUT_MS=10000",
def ssl_services() -> Tuple[Kafka, SchemaRegistry, Testdrive]:
    """Build the SSL-enabled Kafka, Schema Registry, and Testdrive services
    used by this composition, returned in that order."""
    broker = Kafka(
        depends_on=["zookeeper", "test-certs"],
        environment=[
            # Default
            "KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181",
            "KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false",
            "KAFKA_MIN_INSYNC_REPLICAS=1",
            "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1",
            "KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1",
            "KAFKA_MESSAGE_MAX_BYTES=15728640",
            "KAFKA_REPLICA_FETCH_MAX_BYTES=15728640",
            # For this test
            "KAFKA_SSL_KEYSTORE_FILENAME=kafka.keystore.jks",
            "KAFKA_SSL_KEYSTORE_CREDENTIALS=cert_creds",
            "KAFKA_SSL_KEY_CREDENTIALS=cert_creds",
            "KAFKA_SSL_TRUSTSTORE_FILENAME=kafka.truststore.jks",
            "KAFKA_SSL_TRUSTSTORE_CREDENTIALS=cert_creds",
            "KAFKA_SSL_CLIENT_AUTH=required",
            "KAFKA_SECURITY_INTER_BROKER_PROTOCOL=SSL",
        ],
        listener_type="SSL",
        volumes=["secrets:/etc/kafka/secrets"],
    )
    registry = SchemaRegistry(
        depends_on=["kafka", "zookeeper", "test-certs"],
        environment=[
            "SCHEMA_REGISTRY_KAFKASTORE_TIMEOUT_MS=10000",
            "SCHEMA_REGISTRY_HOST_NAME=schema-registry",
            "SCHEMA_REGISTRY_LISTENERS=https://0.0.0.0:8081",
            "SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181",
            "SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL=SSL",
            "SCHEMA_REGISTRY_KAFKASTORE_SSL_KEYSTORE_LOCATION=/etc/schema-registry/secrets/schema-registry.keystore.jks",
            "SCHEMA_REGISTRY_SSL_KEYSTORE_LOCATION=/etc/schema-registry/secrets/schema-registry.keystore.jks",
            "SCHEMA_REGISTRY_KAFKASTORE_SSL_KEYSTORE_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_SSL_KEYSTORE_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_KAFKASTORE_SSL_KEY_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_SSL_KEY_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_LOCATION=/etc/schema-registry/secrets/schema-registry.truststore.jks",
            "SCHEMA_REGISTRY_SSL_TRUSTSTORE_LOCATION=/etc/schema-registry/secrets/schema-registry.truststore.jks",
            "SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_SSL_TRUSTSTORE_PASSWORD=mzmzmz",
            "SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL=https",
            "SCHEMA_REGISTRY_SSL_CLIENT_AUTH=true",
        ],
        volumes=[
            "secrets:/etc/schema-registry/secrets",
        ],
        bootstrap_server_type="SSL",
    )
    td = Testdrive(
        entrypoint=[
            "bash",
            "-c",
            "cp /share/secrets/ca.crt /usr/local/share/ca-certificates/ca.crt && "
            "update-ca-certificates && "
            "testdrive "
            "--kafka-addr=kafka:9092 "
            "--schema-registry-url=https://schema-registry:8081 "
            "--materialized-url=postgres://materialize@materialized:6875 "
            "--cert=/share/secrets/producer.p12 "
            "--cert-password=mzmzmz "
            "--ccsr-password=sekurity "
            "--ccsr-username=materialize "
            '"$$@"',
        ],
        volumes_extra=["secrets:/share/secrets"],
        # Required to install root certs above
        propagate_uid_gid=False,
        validate_catalog=False,
    )
    return (broker, registry, td)
Testdrive, Zookeeper, ) # All released Materialize versions, in order from most to least recent. all_versions = util.known_materialize_versions() # The `materialized` options that are valid only at or above a certain version. mz_options = {Version.parse("0.9.2"): "--persistent-user-tables"} SERVICES = [ TestCerts(), Zookeeper(), Kafka( # for some reason docker-compose wants kafka to be setup # with the same volumes when overriden depends_on=["zookeeper", "test-certs"], volumes=["secrets:/etc/kafka/secrets"], ), SchemaRegistry( depends_on=["kafka", "zookeeper", "test-certs"], volumes=[ "secrets:/etc/schema-registry/secrets", ], ), Postgres(), Materialized( options=" ".join(mz_options.values()), environment=[ "SSL_KEY_PASSWORD=mzmzmz", ], volumes_extra=["secrets:/share/secrets"],