def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    """Runs the dbt adapter test suite against Materialize in various configurations."""
    parser.add_argument(
        "filter", nargs="?", default="", help="limit to test cases matching filter"
    )
    args = parser.parse_args()

    for case in test_cases:
        # Skip any case whose name doesn't match the user-supplied filter.
        if args.filter not in case.name:
            continue
        print(f"> Running test case {case.name}")
        mz = Materialized(
            options=case.materialized_options,
            image=case.materialized_image,
            depends_on=["test-certs"],
            volumes=["secrets:/secrets"],
        )
        # Run dbt's pytest suite against this particular Materialize config.
        with c.override(mz):
            c.up("materialized")
            c.wait_for_tcp(host="materialized", port=6875)
            c.run(
                "dbt-test",
                "pytest",
                "dbt-materialize/test",
                env_extra=case.dbt_env,
            )
def run_test(c: Composition, materialized: str, env: Dict[str, str]) -> None:
    """Bring up the named Materialize service and run the dbt adapter tests against it.

    `env` is passed through to the dbt-test container's environment.
    """
    c.up(materialized)
    c.wait_for_tcp(host=materialized, port=6875)
    c.run("dbt-test", "pytest", "dbt-materialize/test", env=env)
def initialize(c: Composition) -> None:
    """Start the core services and perform one-time Postgres configuration."""
    c.up("materialized", "postgres", "toxiproxy")
    c.wait_for_materialized()
    c.wait_for_postgres()
    c.wait_for_tcp(host="toxiproxy", port=8474)
    # configure-postgres.td is executed once for all workflows because its
    # CREATE USER statement is not idempotent.
    c.run("testdrive-svc", "configure-postgres.td")
def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    """Runs the dbt adapter test suite against Materialize in various configurations."""
    parser.add_argument(
        "filter", nargs="?", default="", help="limit to test cases matching filter"
    )
    args = parser.parse_args()

    for case in test_cases:
        # Only run cases whose name matches the user-supplied filter.
        if args.filter not in case.name:
            continue
        print(f"> Running test case {case.name}")
        mz = Materialized(
            options=case.materialized_options,
            image=case.materialized_image,
            depends_on=["test-certs"],
            volumes_extra=["secrets:/secrets"],
        )
        with c.test_case(case.name), c.override(mz):
            # Start from a clean slate, then bring up the Kafka stack
            # before Materialize itself.
            c.down()
            c.start_and_wait_for_tcp(
                services=["zookeeper", "kafka", "schema-registry"]
            )
            c.up("materialized")
            c.wait_for_tcp(host="materialized", port=6875)
            c.run(
                "dbt-test",
                "pytest",
                "dbt-materialize/test",
                env_extra={
                    "DBT_HOST": "materialized",
                    "KAFKA_ADDR": "kafka:9092",
                    "SCHEMA_REGISTRY_URL": "http://schema-registry:8081",
                    **case.dbt_env,
                },
            )
def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    """Run CH-benCHmark without any load on Materialize"""
    # Parse known arguments; anything unrecognized is forwarded to chbench.
    parser.add_argument(
        "--wait", action="store_true", help="wait for the load generator to exit"
    )
    args, unknown_args = parser.parse_known_args()

    # Bring up Materialize first.
    c.up("materialized")
    c.wait_for_materialized()

    # Then MySQL and Debezium.
    c.up("mysql", "debezium")
    c.wait_for_tcp(host="mysql", port=3306)
    c.wait_for_tcp(host="debezium", port=8083)

    # Seed MySQL with the initial benchmark dataset.
    c.run(
        "chbench",
        "gen",
        "--config-file-path=/etc/chbenchmark/mz-default-mysql.cfg",
        "--warehouses=1",
    )

    # Register the Debezium MySQL connector via its REST API.
    connector_config = {
        "connector.class": "io.debezium.connector.mysql.MySqlConnector",
        "database.hostname": "mysql",
        "database.port": "3306",
        "database.user": "******",
        "database.password": "******",
        "database.server.name": "debezium",
        "database.server.id": "1234",
        "database.history.kafka.bootstrap.servers": "kafka:9092",
        "database.history.kafka.topic": "mysql-history",
        "database.allowPublicKeyRetrieval": "true",
        "time.precision.mode": "connect",
    }
    resp = requests.post(
        f"http://localhost:{c.default_port('debezium')}/connectors",
        json={"name": "mysql-connector", "config": connector_config},
    )
    # A conflict means the connector was already registered; that's fine.
    if resp.status_code != requests.codes.conflict:
        resp.raise_for_status()

    # Launch the load generator, detached unless --wait was given.
    c.run(
        "chbench",
        "run",
        "--config-file-path=/etc/chbenchmark/mz-default-mysql.cfg",
        "--dsn=mysql",
        "--gen-dir=/var/lib/mysql-files",
        "--analytic-threads=0",
        "--transactional-threads=1",
        "--run-seconds=86400",
        "--mz-sources",
        *unknown_args,
        detach=not args.wait,
    )
def start_everything(c: Composition) -> None:
    """Start Kafka and Materialize, blocking until both accept connections."""
    c.up("kafka", "materialized")
    c.wait_for_tcp(host="kafka", port=9092)
    c.wait_for_materialized()