def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    """Run testdrive."""
    parser.add_argument(
        "--redpanda",
        action="store_true",
        help="run against Redpanda instead of the Confluent Platform",
    )
    parser.add_argument(
        "--aws-region",
        help="run against the specified AWS region instead of localstack",
    )
    parser.add_argument(
        "--workers",
        type=int,
        metavar="N",
        help="set the number of materialized dataflow workers",
    )
    parser.add_argument(
        "--persistent-user-tables",
        action="store_true",
        help="enable the --persistent-user-tables materialized option",
    )
    parser.add_argument(
        "files",
        nargs="*",
        default=["*.td", "esoteric/*.td"],
        help="run against the specified files",
    )
    args = parser.parse_args()

    # The Confluent Platform images are prohibitively slow under emulation on
    # ARM hosts, so steer users toward Redpanda there.
    if not args.redpanda and Arch.host() == Arch.AARCH64:
        ui.warn(
            "Running the Confluent Platform in Docker on ARM-based machines is "
            "nearly unusably slow. Consider using Redpanda instead (--redpanda) "
            "or running tests without mzcompose."
        )

    # Pick the Kafka-compatible broker stack based on the --redpanda flag.
    if args.redpanda:
        dependencies = ["materialized", "redpanda"]
    else:
        dependencies = ["materialized", "zookeeper", "kafka", "schema-registry"]

    mz_options = []
    if args.persistent_user_tables:
        mz_options.append("--persistent-user-tables")
    materialized = Materialized(
        workers=args.workers,
        options=mz_options,
    )

    # Target a real AWS region when requested; otherwise fall back to the
    # local localstack endpoint.
    if args.aws_region:
        aws_entrypoint = [f"--aws-region={args.aws_region}"]
    else:
        aws_entrypoint = ["--aws-endpoint=http://localstack:4566"]
    testdrive = Testdrive(
        forward_buildkite_shard=True,
        entrypoint_extra=aws_entrypoint,
    )

    with c.override(materialized, testdrive):
        c.start_and_wait_for_tcp(services=dependencies)
        c.wait_for_materialized("materialized")
        c.run("testdrive-svc", *args.files)
        c.kill("materialized")
def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
    """Run testdrive."""
    parser.add_argument(
        "--redpanda",
        action="store_true",
        help="run against Redpanda instead of the Confluent Platform",
    )
    parser.add_argument(
        "--aws-region",
        help="run against the specified AWS region instead of localstack",
    )
    parser.add_argument(
        "--kafka-default-partitions",
        type=int,
        metavar="N",
        help="set the default number of kafka partitions per topic",
    )
    parser.add_argument(
        "files",
        nargs="*",
        default=["*.td"],
        help="run against the specified files",
    )
    args = parser.parse_args()

    # The Confluent Platform images are prohibitively slow under emulation on
    # ARM hosts, so steer users toward Redpanda there.
    if not args.redpanda and Arch.host() == Arch.AARCH64:
        ui.warn(
            "Running the Confluent Platform in Docker on ARM-based machines is "
            "nearly unusably slow. Consider using Redpanda instead (--redpanda) "
            "or running tests without mzcompose."
        )

    dependencies = ["materialized"]
    if args.redpanda:
        dependencies += ["redpanda"]
    else:
        dependencies += ["zookeeper", "kafka", "schema-registry"]

    # localstack is only needed when no real AWS region was requested.
    if args.aws_region is None:
        dependencies += ["localstack"]

    testdrive = Testdrive(
        forward_buildkite_shard=True,
        kafka_default_partitions=args.kafka_default_partitions,
        aws_region=args.aws_region,
        validate_postgres_stash=True,
    )

    with c.override(testdrive):
        c.start_and_wait_for_tcp(services=dependencies)
        c.wait_for_materialized("materialized")
        # Compute the report filename *before* entering the try block: it was
        # previously assigned inside the try, so a failure in
        # junit_report_filename() would have raised NameError in the finally
        # clause, masking the original error.
        junit_report = ci_util.junit_report_filename(c.name)
        try:
            c.run("testdrive", f"--junit-report={junit_report}", *args.files)
        finally:
            # Upload the report even when testdrive fails, so CI surfaces the
            # failing tests.
            ci_util.upload_junit_report(
                "testdrive", Path(__file__).parent / junit_report
            )
def check_docker_resource_limits(self) -> None:
    """Warn when the Docker daemon has less memory or fewer CPUs than recommended."""
    # Ask the Docker daemon for its total memory (bytes) and CPU count in one
    # call; the format string yields two space-separated integers.
    info = self.capture(
        ["docker", "system", "info", "--format", "{{.MemTotal}} {{.NCPU}}"]
    )
    mem, ncpus = (int(field) for field in info.split())

    if mem < RECOMMENDED_MIN_MEM:
        ui.warn(
            f"Docker only has {naturalsize(mem, binary=True)} of memory available. "
            f"We recommend at least {naturalsize(RECOMMENDED_MIN_MEM, binary=True)} of memory. "
            "See https://materialize.com/docs/third-party/docker/."
        )

    if ncpus < RECOMMENDED_MIN_CPUS:
        ui.warn(
            f"Docker only has {ncpus} CPU available. "
            f"We recommend at least {RECOMMENDED_MIN_CPUS} CPUs. "
            "See https://materialize.com/docs/third-party/docker/."
        )
def _handle_lingering_services(kill: bool = False) -> None:
    """Detect leftover required-service processes owned by the current user.

    Args:
        kill: when True, kill matching orphaned processes; when False, only
            warn that the existing process will be reused.
    """
    uid = os.getuid()
    for proc in psutil.process_iter():
        try:
            if proc.name() not in REQUIRED_SERVICES:
                continue
            if proc.uids().real != uid:
                # A different UID usually means the process lives inside a
                # container; leave it alone.
                print(
                    f"Ignoring {proc.name()} process with different UID (PID {proc.pid}, likely running in Docker)"
                )
            elif kill:
                print(
                    f"Killing orphaned {proc.name()} process (PID {proc.pid})"
                )
                proc.kill()
            else:
                ui.warn(
                    f"Existing {proc.name()} process (PID {proc.pid}) will be reused"
                )
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # NoSuchProcess: the process exited while we were inspecting it.
            # AccessDenied: psutil's name()/uids()/kill() may raise this for
            # processes we lack permission to query (previously uncaught, which
            # would have aborted the entire scan). Skip and keep iterating.
            continue