Code Example #1
    def run(self, args: argparse.Namespace) -> None:
        composition = load_composition(args)

        service = composition.compose["services"].get(args.service)
        if not service:
            raise UIError(f"unknown service {args.service!r}")

        image = service["image"].split(":")[0]
        if image != "materialize/materialized":
            raise UIError(
                f"cannot connect SQL shell to non-materialized service {args.service!r}"
            )

        # Attempting to load the default port will produce a nice error message
        # if the service isn't running or isn't exposing a port.
        composition.default_port(args.service)

        deps = composition.repo.resolve_dependencies(
            [composition.repo.images["psql"]])
        deps.acquire()
        deps["psql"].run(
            [
                "-h",
                service.get("hostname", args.service),
                "-p",
                "6875",
                "-U",
                "materialize",
                "materialize",
            ],
            docker_args=[
                "--interactive", f"--network={composition.name}_default"
            ],
        )
Code Example #2
    def handle_composition(self, args: argparse.Namespace,
                           composition: mzcompose.Composition) -> None:
        if args.workflow not in composition.workflows:
            # Restart any dependencies whose definitions have changed. This is
            # Docker Compose's default behavior for `up`, but not for `run`,
            # which is a constant irritation that we paper over here. The trick,
            # taken from Buildkite's Docker Compose plugin, is to run an `up`
            # command that requests zero instances of the requested service.
            if args.workflow:
                composition.invoke(
                    "up",
                    "-d",
                    "--scale",
                    f"{args.workflow}=0",
                    args.workflow,
                )
            super().handle_composition(args, composition)
        else:
            # The user has specified a workflow rather than a service. Run the
            # workflow instead of Docker Compose.
            if args.unknown_args:
                bad_arg = args.unknown_args[0]
            elif args.unknown_subargs[0].startswith("-"):
                bad_arg = args.unknown_subargs[0]
            else:
                bad_arg = None
            if bad_arg:
                raise UIError(
                    f"unknown option {bad_arg!r}",
                    hint=f"if {bad_arg!r} is a valid Docker Compose option, "
                    f"it can't be used when running {args.workflow!r}, because {args.workflow!r} "
                    "is a custom mzcompose workflow, not a Docker Compose service",
                )

            # Run the workflow inside of a test case so that we get some basic
            # test analytics, even if the workflow doesn't define more granular
            # test cases.
            with composition.test_case(f"workflow-{args.workflow}"):
                composition.workflow(args.workflow, *args.unknown_subargs[1:])

            # Upload test report to Buildkite Test Analytics.
            junit_suite = junit_xml.TestSuite(composition.name)
            for (name, result) in composition.test_results.items():
                test_case = junit_xml.TestCase(name, composition.name,
                                               result.duration)
                if result.error:
                    test_case.add_error_info(message=result.error)
                junit_suite.test_cases.append(test_case)
            junit_report = ci_util.junit_report_filename("mzcompose")
            with junit_report.open("w") as f:
                junit_xml.to_xml_report_file(f, [junit_suite])
            ci_util.upload_junit_report("mzcompose", junit_report)

            if any(result.error
                   for result in composition.test_results.values()):
                raise UIError("at least one test case failed")
Code Example #3
    def _munge_services(
        self, services: List[Tuple[str, dict]]
    ) -> mzbuild.DependencySet:
        images = []

        for name, config in services:
            # Remember any mzbuild references.
            if "mzbuild" in config:
                image_name = config["mzbuild"]
                if image_name not in self.repo.images:
                    raise UIError(f"mzcompose: unknown image {image_name}")
                image = self.repo.images[image_name]
                images.append(image)

            if "propagate_uid_gid" in config:
                if config["propagate_uid_gid"]:
                    config["user"] = f"{os.getuid()}:{os.getgid()}"
                del config["propagate_uid_gid"]

            ports = config.setdefault("ports", [])
            for i, port in enumerate(ports):
                if self.preserve_ports and ":" not in str(port):
                    # If preserving ports, bind the container port to the same
                    # host port, assuming the host port is available.
                    ports[i] = f"{port}:{port}"
                elif ":" in str(port) and not config.get("allow_host_ports", False):
                    # Raise an error for host-bound ports, unless
                    # `allow_host_ports` is `True`
                    raise UIError(
                        "programming error: disallowed host port in service {name!r}",
                        hint=f'Add `"allow_host_ports": True` to the service config to disable this check.',
                    )

            if "allow_host_ports" in config:
                config.pop("allow_host_ports")

            if self.repo.rd.coverage:
                # Emit coverage information to a file in a directory that is
                # bind-mounted to the "coverage" directory on the host. We
                # inject the configuration into all services for simplicity, but
                # this only has an effect if the service runs instrumented Rust
                # binaries.
                config.setdefault("environment", []).append(
                    f"LLVM_PROFILE_FILE=/coverage/{name}-%m.profraw"
                )
                config.setdefault("volumes", []).append("./coverage:/coverage")

        # Determine mzbuild specs and inject them into services accordingly.
        deps = self.repo.resolve_dependencies(images)
        for _name, config in services:
            if "mzbuild" in config:
                config["image"] = deps[config["mzbuild"]].spec()
                del config["mzbuild"]

        return deps
Code Example #4
 def capture(self, args: List[str], stderr_too: bool = False) -> str:
     try:
         return spawn.capture(args, stderr_too=stderr_too, unicode=True)
     except subprocess.CalledProcessError as e:
         # Print any captured output, since it probably hints at the problem.
         print(e.output, file=sys.stderr, end="")
         raise UIError(
             f"running `{args[0]}` failed (exit status {e.returncode})")
     except FileNotFoundError:
         raise UIError(f"unable to launch `{args[0]}`",
                       hint=f"is {args[0]} installed?")
Code Example #5
 def handle_composition(self, args: argparse.Namespace,
                        composition: mzcompose.Composition) -> None:
     if args.workflow not in composition.workflows:
         # Restart any dependencies whose definitions have changed. This is
         # Docker Compose's default behavior for `up`, but not for `run`,
         # which is a constant irritation that we paper over here. The trick,
         # taken from Buildkite's Docker Compose plugin, is to run an `up`
         # command that requests zero instances of the requested service.
         if args.workflow:
             composition.invoke(
                 "up",
                 "-d",
                 "--scale",
                 f"{args.workflow}=0",
                 args.workflow,
             )
         super().handle_composition(args, composition)
     else:
         # The user has specified a workflow rather than a service. Run the
         # workflow instead of Docker Compose.
         if args.unknown_args:
             bad_arg = args.unknown_args[0]
         elif args.unknown_subargs[0].startswith("-"):
             bad_arg = args.unknown_subargs[0]
         else:
             bad_arg = None
         if bad_arg:
             raise UIError(
                 f"unknown option {bad_arg!r}",
                 hint=f"if {bad_arg!r} is a valid Docker Compose option, "
                 f"it can't be used when running {args.workflow!r}, because {args.workflow!r} "
                 "is a custom mzcompose workflow, not a Docker Compose service",
             )
         composition.workflow(args.workflow, *args.unknown_subargs[1:])
Code Example #6
    def _resolve_mzbuild_references(self) -> None:
        # Resolve all services that reference an `mzbuild` image to a specific
        # `image` reference.
        for name, config in self.compose["services"].items():
            if "mzbuild" in config:
                image_name = config["mzbuild"]

                if image_name not in self.repo.images:
                    raise UIError(f"mzcompose: unknown image {image_name}")

                image = self.repo.images[image_name]
                override_tag = os.getenv(
                    f"MZBUILD_{image.env_var_name()}_TAG", self.default_tag
                )
                if override_tag is not None:
                    config["image"] = image.docker_name(override_tag)
                    print(
                        f"mzcompose: warning: overriding {image_name} image to tag {override_tag}",
                        file=sys.stderr,
                    )
                    del config["mzbuild"]
                else:
                    self.images.append(image)

        deps = self.repo.resolve_dependencies(self.images)
        for config in self.compose["services"].values():
            if "mzbuild" in config:
                config["image"] = deps[config["mzbuild"]].spec()
                del config["mzbuild"]
Code Example #7
def wait(
    condition: str, resource: str, timeout_secs: int = 300, context: str = "kind-kind"
) -> None:
    cmd = [
        "kubectl",
        "wait",
        "--for",
        condition,
        resource,
        "--timeout",
        f"{timeout_secs}s",
        "--context",
        context,
    ]
    ui.progress(f'waiting for {" ".join(cmd)} ... ')

    error = None
    for remaining in ui.timeout_loop(timeout_secs, tick=0.1):
        try:
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode(
                "ascii"
            )
            # output is:
            # - an empty string when a 'delete' condition is satisfied
            # - 'condition met' for all other conditions
            if len(output) == 0 or "condition met" in output:
                ui.progress("success!", finish=True)
                return
        except subprocess.CalledProcessError as e:
            print(e, e.output)
            error = e

    ui.progress(finish=True)
    raise UIError(f"kubectl wait never returned 'condition met': {error}")
Code Example #8
    def invoke(self, *args: str, capture: bool = False) -> subprocess.CompletedProcess:
        """Invoke `docker-compose` on the rendered composition.

        Args:
            args: The arguments to pass to `docker-compose`.
            capture: Whether to capture the child's stdout stream.
        """
        print(f"$ docker-compose {' '.join(args)}", file=sys.stderr)

        self.file.seek(0)

        stdout = None
        if capture:
            stdout = subprocess.PIPE

        try:
            return subprocess.run(
                [
                    "docker-compose",
                    f"-f/dev/fd/{self.file.fileno()}",
                    "--project-directory",
                    self.path,
                    *args,
                ],
                close_fds=False,
                check=True,
                stdout=stdout,
                text=True,
            )
        except subprocess.CalledProcessError as e:
            if e.stdout:
                print(e.stdout)
            raise UIError(f"running docker-compose failed (exit status {e.returncode})")
Code Example #9
def new_rc(
    create_branch: Optional[str],
    checkout: Optional[str],
    affect_remote: bool,
    level: str,
) -> None:
    """Start a brand new release

    \b
    Arguments:
        level    Which part of the version to change:
                 * patch    - The Z in X.Y.Z
                 * weekly   - The Y in X.Y.Z
                 * major    - The X in X.Y.Z
                 * rc       - increases the N in -rcN, should only be used if
                              you need to create a second or greater release candidate
    """
    tag = get_latest_tag(fetch=True)
    new_version = None
    if level == "rc":
        if tag.prerelease is None or not tag.prerelease.startswith("rc"):
            raise UIError("Attempted to bump an rc version without starting an RC")
        next_rc = int(tag.prerelease[2:]) + 1
        new_version = tag.replace(prerelease=f"rc{next_rc}")
    elif level == "patch":
        new_version = tag.bump_patch().replace(prerelease="rc1")
    elif level == "weekly":
        new_version = tag.bump_minor().replace(prerelease="rc1")
    elif level == "major":
        new_version = tag.bump_major().replace(prerelease="rc1")
    assert new_version is not None

    release(new_version, checkout, create_branch, True, affect_remote)
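
To make the version arithmetic concrete, a small sketch under the assumption that `get_latest_tag` returns a `semver.VersionInfo` (the `bump_*` and `replace` calls above match that package's API); the starting version is illustrative.

    import semver

    tag = semver.VersionInfo.parse("0.26.1")
    patch_rc = tag.bump_patch().replace(prerelease="rc1")   # 0.26.2-rc1 (level == "patch")
    weekly_rc = tag.bump_minor().replace(prerelease="rc1")  # 0.27.0-rc1 (level == "weekly")
    major_rc = tag.bump_major().replace(prerelease="rc1")   # 1.0.0-rc1  (level == "major")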
Code Example #10
def _run_sql(url: str, sql: str) -> None:
    try:
        spawn.runv(["psql", "-At", url, "-c", sql])
    except Exception as e:
        raise UIError(
            f"unable to execute postgres statement: {e}",
            hint="Have you installed and configured PostgreSQL for passwordless authentication?",
        )
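
A hypothetical call, matching how the run script further down uses this helper to prepare schemas; the connection URL and statement are illustrative.

    _run_sql("postgres://localhost:5432/postgres", "CREATE SCHEMA IF NOT EXISTS consensus")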
Code Example #11
 @classmethod
 def run(cls, args: argparse.Namespace) -> None:
     repo = mzbuild.Repository.from_arguments(ROOT, args)
     errors = []
     for name in repo.compositions:
         errors += mzcompose.Composition.lint(repo, name)
     for error in sorted(errors):
         print(error)
     if errors:
         raise UIError("lint errors discovered")
Code Example #12
    def default_port(self, service: str) -> int:
        """Get the default public port for a service.

        Args:
            service: The name of a service in the composition.
        """
        ports = self.compose["services"][service]["ports"]
        if not ports:
            raise UIError(f"service f{service!r} does not expose any ports")
        private_port = str(ports[0]).split(":")[0]
        return self.port(service, private_port)
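
A hypothetical usage sketch: look up the host port Docker mapped to the first port the service declares, then hand it to whatever needs to connect. The service name is illustrative.

    port = composition.default_port("materialized")
    print(f"materialized is reachable on localhost:{port}")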
Code Example #13
    @contextmanager
    def test_case(self, name: str) -> Iterator[None]:
        """Execute a test case.

        This context manager provides a very lightweight testing framework. If
        the body of the context manager raises an exception, the test case is
        considered to have failed; otherwise it is considered to have succeeded.
        In either case the execution time and status of the test are recorded in
        `test_results`.

        Example:
            A simple workflow that executes a table-driven test:

            ```
            @dataclass
            class TestCase:
                name: str
                files: list[str]

            test_cases = [
                TestCase(name="short", files=["quicktests.td"]),
                TestCase(name="long", files=["longtest1.td", "longtest2.td"]),
            ]

            def workflow_default(c: Composition):
                for tc in test_cases:
                    with c.test_case(tc.name):
                        c.run("testdrive", *tc.files)
            ```

        Args:
            name: The name of the test case. Must be unique across the lifetime
                of a composition.
        """
        if name in self.test_results:
            raise UIError(f"test case {name} executed twice")
        ui.header(f"Running test case {name}")
        error = None
        start_time = time.time()
        try:
            yield
            ui.header(f"mzcompose: test case {name} succeeded")
        except Exception as e:
            error = str(e)
            if isinstance(e, UIError):
                print(f"mzcompose: test case {name} failed: {e}",
                      file=sys.stderr)
            else:
                print(f"mzcompose: test case {name} failed:", file=sys.stderr)
                traceback.print_exc()
        elapsed = time.time() - start_time
        self.test_results[name] = Composition.TestResult(elapsed, error)
Code Example #14
def change_line(fname: str, line_start: str, replacement: str) -> None:
    with open(fname, "r") as fh:
        content = fh.read().splitlines()

    changes = 0
    for i, line in enumerate(content):
        if line.startswith(line_start):
            content[i] = replacement
            changes += 1
    with open(fname, "w") as fh:
        fh.write("\n".join(content))
        fh.write("\n")

    if changes != 1:
        raise UIError(f"Found {changes} {line_start}s in {fname}")
Code Example #15
def _wait_for_pg(
    timeout_secs: int,
    query: str,
    dbname: str,
    port: int,
    host: str,
    user: str,
    password: str,
    print_result: bool,
    expected: Union[Iterable[Any], Literal["any"]],
) -> None:
    """Wait for a pg-compatible database (includes materialized)"""
    args = f"dbname={dbname} host={host} port={port} user={user} password={password}"
    ui.progress(f"waiting for {args} to handle {query!r}", "C")
    error = None
    for remaining in ui.timeout_loop(timeout_secs):
        try:
            conn = pg8000.connect(
                database=dbname,
                host=host,
                port=port,
                user=user,
                password=password,
                timeout=1,
            )
            # The default (autocommit = false) wraps everything in a transaction.
            conn.autocommit = True
            cur = conn.cursor()
            cur.execute(query)
            if expected == "any" and cur.rowcount == -1:
                ui.progress("success!", finish=True)
                return
            result = list(cur.fetchall())
            if expected == "any" or result == expected:
                if print_result:
                    say(f"query result: {result}")
                else:
                    ui.progress("success!", finish=True)
                return
            else:
                say(
                    f"host={host} port={port} did not return rows matching {expected} got: {result}"
                )
        except Exception as e:
            ui.progress(" " + str(int(remaining)))
            error = e
    ui.progress(finish=True)
    raise UIError(f"never got correct result for {args}: {error}")
Code Example #16
    def port(self, service: str, private_port: Union[int, str]) -> int:
        """Get the public port for a service's private port.

        Delegates to `docker-compose port`. See that command's help for details.

        Args:
            service: The name of a service in the composition.
            private_port: A private port exposed by the service.
        """
        proc = self.invoke("port", service, str(private_port), capture=True)
        if not proc.stdout.strip():
            raise UIError(
                f"service f{service!r} is not exposing port {private_port!r}",
                hint="is the service running?",
            )
        return int(proc.stdout.split(":")[1])
Code Example #17
    def run(self, args: argparse.Namespace) -> None:
        if args.help:
            output = self.capture(
                ["docker-compose", self.name, "--help"], stderr=subprocess.STDOUT
            )
            output = output.replace("docker-compose", "./mzcompose")
            output += "\nThis command is a wrapper around Docker Compose."
            if self.help_epilog:
                output += "\n"
                output += self.help_epilog
            print(output, file=sys.stderr)
            return

        # Make sure Docker Compose is new enough.
        output = (
            self.capture(
                ["docker-compose", "version", "--short"], stderr=subprocess.STDOUT
            )
            .strip()
            .strip("v")
        )
        version = tuple(int(i) for i in output.split("."))
        if version < MIN_COMPOSE_VERSION:
            raise UIError(
                f"unsupported docker-compose version v{output}",
                hint=f"minimum version allowed: v{'.'.join(str(p) for p in MIN_COMPOSE_VERSION)}",
            )

        composition = load_composition(args)
        ui.header("Collecting mzbuild images")
        for d in composition.dependencies:
            ui.say(d.spec())

        if self.runs_containers:
            if args.coverage:
                # If the user has requested coverage information, create the
                # coverage directory as the current user, so Docker doesn't create
                # it as root.
                (composition.path / "coverage").mkdir(exist_ok=True)
            self.check_docker_resource_limits()
            composition.dependencies.acquire()

            if "services" in composition.compose:
                composition.pull_if_variable(composition.compose["services"].keys())

        self.handle_composition(args, composition)
Code Example #18
 def wait_for_tcp(
     self,
     *,
     host: str = "localhost",
     port: Union[int, str],
     timeout_secs: int = 240,
 ) -> None:
     if isinstance(port, str):
         port = int(port.split(":")[0])
     ui.progress(f"waiting for {host}:{port}", "C")
     cmd = f"docker run --rm -t --network {self.name}_default ubuntu:focal-20210723".split()
     try:
         _check_tcp(cmd[:], host, port, timeout_secs)
     except subprocess.CalledProcessError:
         ui.progress(" error!", finish=True)
         raise UIError(f"unable to connect to {host}:{port}")
     else:
         ui.progress(" success!", finish=True)
Code Example #19
def update_versions_list(released_version: Version) -> None:
    """Update the doc config with the passed-in version"""
    today = date.today().strftime("%d %B %Y")
    toml_line = (
        f'  {{ name = "v{released_version}", date = "{today}", '
        'targets = ["x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu", '
        '"x86_64-apple-darwin", "aarch64-apple-darwin"] },\n'
    )
    with open(USER_DOC_CONFIG) as fh:
        docs = fh.readlines()
    wrote_line = False
    with open(USER_DOC_CONFIG, "w") as fh:
        for line in docs:
            fh.write(line)
            if line == "versions = [\n":
                fh.write(toml_line)
                wrote_line = True
    if not wrote_line:
        raise UIError("Couldn't determine where to insert new version")
Code Example #20
    def wait_for_tcp(
        self,
        *,
        host: str = "localhost",
        port: int,
        timeout_secs: int = 240,
    ) -> None:
        ui.progress(f"waiting for {host}:{port}", "C")
        for remaining in ui.timeout_loop(timeout_secs):
            cmd = f"docker run --rm -t --network {self.name}_default ubuntu:focal-20210723".split()

            try:
                _check_tcp(cmd[:], host, port, timeout_secs)
            except subprocess.CalledProcessError:
                ui.progress(" {}".format(int(remaining)))
            else:
                ui.progress(" success!", finish=True)
                return

        ui.progress(" error!", finish=True)
        raise UIError(f"unable to connect to {host}:{port}")
Code Example #21
def load_composition(args: argparse.Namespace) -> mzcompose.Composition:
    """Loads the composition specified by the command-line arguments."""
    repo = mzbuild.Repository.from_arguments(ROOT, args)
    try:
        return mzcompose.Composition(repo,
                                     name=args.find or Path.cwd().name,
                                     preserve_ports=args.preserve_ports)
    except mzcompose.UnknownCompositionError as e:
        if args.find:
            hint = "available compositions:\n"
            for name in repo.compositions:
                hint += f"    {name}\n"
            e.set_hint(hint)
            raise e
        else:
            hint = "enter one of the following directories and run ./mzcompose:\n"
            for path in repo.compositions.values():
                hint += f"    {path.relative_to(Path.cwd())}\n"
            raise UIError(
                "directory does not contain an mzcompose.yml or mzcompose.py",
                hint,
            )
Code Example #22
def list_prs(recent_ref: Optional[str], ancestor_ref: Optional[str]) -> None:
    """
    List PRs between a range of refs

    If no refs are specified, then this will find the refs between the most
    recent tag and the previous semver tag (i.e. excluding RCs)
    """
    git.fetch()
    if recent_ref is None or ancestor_ref is None:
        tags = git.get_version_tags(fetch=False)
        if recent_ref is None:
            recent = tags[0]
            recent_ref = str(tags[0])
        else:
            recent = Version.parse(recent_ref)
        if ancestor_ref is None:
            for ref in tags[1:]:
                ancestor = ref
                if (
                    ancestor.major < recent.major
                    or ancestor.minor < recent.minor
                    or ancestor.patch < recent.patch
                ):
                    ancestor_ref = str(ref)
                    break

            ui.say(
                f"Using recent_ref={recent_ref}  ancestor_ref={ancestor_ref}",
            )

    commit_range = f"v{ancestor_ref}..v{recent_ref}"
    commits = spawn.capture(
        [
            "git",
            "log",
            "--pretty=format:%d %s",
            "--abbrev-commit",
            "--date=iso",
            commit_range,
            "--",
        ],
        unicode=True,
    )

    pattern = re.compile(r"^\s*\(refs/pullreqs/(\d+)|\(#(\d+)")
    prs = []
    found_ref = False
    for commit in commits.splitlines():
        if "build(deps)" in commit:
            continue

        match = pattern.search(commit)
        if match is not None:
            pr = match.group(1)
            if pr:
                found_ref = True
            else:
                pr = match.group(2)
            prs.append(pr)

    if not found_ref:
        ui.say(
            "WARNING: you probably don't have pullreqs configured for your repo",
        )
        ui.say(
            "Add the following line to the MaterializeInc/materialize remote section in your .git/config",
        )
        ui.say("  fetch = +refs/pull/*/head:refs/pullreqs/*")

    username = input("Enter your github username: ")
    creds_path = os.path.expanduser("~/.config/materialize/dev-tools-access-token")

    try:
        with open(creds_path) as fh:
            token = fh.read().strip()
    except FileNotFoundError:
        raise UIError(
            f"""No developer tool api token at {creds_path!r}
    please create an access token at https://github.com/settings/tokens"""
        )

    def get(pr: str) -> Any:
        return requests.get(
            f"https://{username}:{token}@api.github.com/repos/MaterializeInc/materialize/pulls/{pr}",
            headers={
                "Accept": "application/vnd.github.v3+json",
            },
        ).json()

    collected = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        futures = {pool.submit(get, pr): pr for pr in prs}
        for future in concurrent.futures.as_completed(futures):
            pr = futures[future]
            contents = future.result()
            try:
                url = contents["html_url"]
                title = contents["title"]
                collected.append((url, title))
            except KeyError:
                raise UIError(contents)
    for url, title in sorted(collected):
        print(url, title)
Code Example #23
def release(
    version: Version,
    checkout: Optional[str],
    create_branch: Optional[str],
    tag: bool,
    affect_remote: bool,
) -> None:
    """Update documents for a release and create tags

    If both `-b` and `-c` are specified, the checkout happens before the branch creation,
    meaning that the new branch is created on the target of `-c`.

    For example, the release invocation::

        mkrelease -b prepare-v0.1.2 -c v0.1.1-rc1 v0.1.2-dev

    has the same git semantics as::

        git checkout -b prepare-v0.1.2 v0.1.1-rc1

    \b
    Arguments:
        version: The version to release. The `v` prefix is optional
    """
    if git.is_dirty():
        raise UIError("working directory is not clean, stash or commit your changes")

    the_tag = f"v{version}"
    confirm_version_is_next(version, affect_remote)

    if checkout is not None:
        git.checkout(checkout)
    if create_branch is not None:
        git.create_branch(create_branch)

    confirm_on_latest_rc(affect_remote)

    change_line(BIN_CARGO_TOML, "version", f'version = "{version}"')
    change_line(
        LICENSE,
        "Licensed Work:",
        f"Licensed Work:             Materialize Version {version}",
    )
    # Don't update the change date unless some code has changed
    if version.prerelease:
        future = four_years_hence()
        change_line(LICENSE, "Change Date", f"Change Date:               {future}")

    ui.say("Updating Cargo.lock")
    spawn.runv(["cargo", "check", "-p", "materialized"])
    spawn.runv(["cargo", "check", "-p", "materialized"])
    spawn.runv(["cargo", "check", "-p", "materialized", "--locked"])
    if tag:
        git.commit_all_changed(f"release: {the_tag}")
        git.tag_annotated(the_tag)
    else:
        git.commit_all_changed(f"Prepare next phase of development: {the_tag}")
        latest_tag = get_latest_tag(fetch=False)
        # we have made an actual release
        if latest_tag.prerelease is None and click.confirm(
            f"Update doc/user/config.toml marking v{latest_tag} as released"
        ):
            update_versions_list(latest_tag)
            git.commit_all_changed(f"Update released versions to include v{latest_tag}")

    matching = git.first_remote_matching("MaterializeInc/materialize")
    if tag:
        if matching is not None:
            spawn.runv(["git", "show", "HEAD"])
            if affect_remote and ui.confirm(
                f"\nWould you like to push the above changes as: git push {matching} {the_tag}"
            ):
                spawn.runv(["git", "push", matching, the_tag])
        else:
            ui.say("")
            ui.say(
                f"Next step is to push {the_tag} to the MaterializeInc/materialize repo"
            )
    else:
        branch = git.rev_parse("HEAD", abbrev=True)
        ui.say("")
        ui.say(f"Create a PR with your branch: '{branch}'")
Code Example #24
    def __init__(
        self, repo: mzbuild.Repository, name: str, preserve_ports: bool = False
    ):
        self.name = name
        self.repo = repo
        self.images: List[mzbuild.Image] = []
        self.workflows: Dict[str, Callable[..., None]] = {}

        self.default_tag = os.getenv("MZBUILD_TAG", None)

        if name in self.repo.compositions:
            self.path = self.repo.compositions[name]
        else:
            raise UnknownCompositionError(name)

        # load the mzcompose.yml file, if one exists
        mzcompose_yml = self.path / "mzcompose.yml"
        if mzcompose_yml.exists():
            with open(mzcompose_yml) as f:
                compose = yaml.safe_load(f) or {}
        else:
            compose = {}

        self.compose = compose

        if "version" not in compose:
            compose["version"] = "3.7"

        if "services" not in compose:
            compose["services"] = {}

        # Load the mzcompose.py file, if one exists
        mzcompose_py = self.path / "mzcompose.py"
        if mzcompose_py.exists():
            spec = importlib.util.spec_from_file_location("mzcompose", mzcompose_py)
            assert spec
            module = importlib.util.module_from_spec(spec)
            assert isinstance(spec.loader, importlib.abc.Loader)
            spec.loader.exec_module(module)
            for name, fn in getmembers(module, isfunction):
                if name.startswith("workflow_"):
                    # The name of the workflow is the name of the function
                    # with the "workflow_" prefix stripped and any underscores
                    # replaced with dashes.
                    name = name[len("workflow_") :].replace("_", "-")
                    self.workflows[name] = fn

            for python_service in getattr(module, "SERVICES", []):
                compose["services"][python_service.name] = python_service.config

        for name, config in compose["services"].items():
            if "propagate_uid_gid" in config:
                if config["propagate_uid_gid"]:
                    config["user"] = f"{os.getuid()}:{os.getgid()}"
                del config["propagate_uid_gid"]

            ports = config.setdefault("ports", [])
            for i, port in enumerate(ports):
                if ":" in str(port):
                    raise UIError(
                        "programming error: disallowed host port in service {name!r}"
                    )
                if preserve_ports:
                    # If preserving ports, bind the container port to the same
                    # host port.
                    ports[i] = f"{port}:{port}"

            if self.repo.rd.coverage:
                # Emit coverage information to a file in a directory that is
                # bind-mounted to the "coverage" directory on the host. We
                # inject the configuration into all services for simplicity, but
                # this only has an effect if the service runs instrumented Rust
                # binaries.
                config.setdefault("environment", []).append(
                    f"LLVM_PROFILE_FILE=/coverage/{name}-%m.profraw"
                )
                config.setdefault("volumes", []).append("./coverage:/coverage")

        # Add default volumes
        compose.setdefault("volumes", {}).update(
            {
                "mzdata": None,
                "tmp": None,
                "secrets": None,
            }
        )

        self._resolve_mzbuild_references()

        # Emit the munged configuration to a temporary file so that we can later
        # pass it to Docker Compose.
        self.file = TemporaryFile()
        os.set_inheritable(self.file.fileno(), True)
        self._write_compose()
Code Example #25
def main() -> int:
    parser = argparse.ArgumentParser(
        prog="run",
        description="""Build and run a core service or test.
        Wraps `cargo run` and `cargo test` with Materialize-specific logic.""",
    )
    parser.add_argument(
        "program",
        help="the name of the program to run",
        choices=[*KNOWN_PROGRAMS, "test"],
    )
    parser.add_argument(
        "args",
        help="Arguments to pass to the program",
        nargs="*",
    )
    parser.add_argument(
        "--reset",
        help="Delete data from prior runs of the program",
        action="store_true",
    )
    parser.add_argument(
        "--postgres",
        help="PostgreSQL connection string",
        default=os.getenv("MZDEV_POSTGRES", DEFAULT_POSTGRES),
    )
    parser.add_argument(
        "--release",
        help="Build artifacts in release mode, with optimizations",
        action="store_true",
    )
    parser.add_argument(
        "--timings",
        help="Output timing information",
        action="store_true",
    )
    parser.add_argument(
        "--no-default-features",
        help="Do not activate the `default` feature",
        action="store_true",
    )
    parser.add_argument(
        "-p",
        "--package",
        help="Package to run tests for",
        action="append",
        default=[],
    )
    parser.add_argument(
        "--test",
        help="Test only the specified test target",
        action="append",
        default=[],
    )
    parser.add_argument(
        "--tokio-console",
        help="Activate the Tokio console",
        action="store_true",
    )
    args = parser.parse_intermixed_args()

    # Handle `+toolchain` like rustup.
    args.channel = None
    if len(args.args) > 0 and args.args[0].startswith("+"):
        args.channel = args.args[0]
        del args.args[0]

    if args.program in KNOWN_PROGRAMS:
        _build(args, extra_programs=[args.program])
        if args.release:
            path = ROOT / "target" / "release" / args.program
        else:
            path = ROOT / "target" / "debug" / args.program
        command = [str(path), *args.args]
        if args.tokio_console:
            command += ["--tokio-console-listen-addr=127.0.0.1:6669"]
        if args.program == "environmentd":
            if args.reset:
                print("Removing mzdata directory...")
                shutil.rmtree("mzdata", ignore_errors=True)
            for schema in ["consensus", "catalog", "storage"]:
                if args.reset:
                    _run_sql(args.postgres, f"DROP SCHEMA IF EXISTS {schema} CASCADE")
                _run_sql(args.postgres, f"CREATE SCHEMA IF NOT EXISTS {schema}")
            command += [
                f"--persist-consensus-url={args.postgres}?options=--search_path=consensus",
                f"--catalog-postgres-stash={args.postgres}?options=--search_path=catalog",
                f"--storage-postgres-stash={args.postgres}?options=--search_path=storage",
            ]
        elif args.program == "sqllogictest":
            command += [f"--postgres-url={args.postgres}"]
    elif args.program == "test":
        _build(args)
        command = _cargo_command(args, "test")
        for package in args.package:
            command += ["--package", package]
        for test in args.test:
            command += ["--test", test]
        command += args.args
        command += ["--", "--nocapture"]
        os.environ["POSTGRES_URL"] = args.postgres
    else:
        raise UIError(f"unknown program {args.program}")

    print(f"$ {' '.join(command)}")
    os.execvp(command[0], command)
Code Example #26
    def __init__(
        self,
        repo: mzbuild.Repository,
        name: str,
        preserve_ports: bool = False,
        silent: bool = False,
        munge_services: bool = True,
    ):
        self.name = name
        self.description = None
        self.repo = repo
        self.preserve_ports = preserve_ports
        self.silent = silent
        self.workflows: Dict[str, Callable[..., None]] = {}
        self.test_results: OrderedDict[str, Composition.TestResult] = OrderedDict()

        if name in self.repo.compositions:
            self.path = self.repo.compositions[name]
        else:
            raise UnknownCompositionError(name)

        # load the mzcompose.yml file, if one exists
        mzcompose_yml = self.path / "mzcompose.yml"
        if mzcompose_yml.exists():
            with open(mzcompose_yml) as f:
                compose = yaml.safe_load(f) or {}
        else:
            compose = {}

        self.compose = compose

        if "version" not in compose:
            compose["version"] = "3.7"

        if "services" not in compose:
            compose["services"] = {}

        # Load the mzcompose.py file, if one exists
        mzcompose_py = self.path / "mzcompose.py"
        if mzcompose_py.exists():
            spec = importlib.util.spec_from_file_location("mzcompose", mzcompose_py)
            assert spec
            module = importlib.util.module_from_spec(spec)
            assert isinstance(spec.loader, importlib.abc.Loader)
            spec.loader.exec_module(module)
            self.description = inspect.getdoc(module)
            for name, fn in getmembers(module, isfunction):
                if name.startswith("workflow_"):
                    # The name of the workflow is the name of the function
                    # with the "workflow_" prefix stripped and any underscores
                    # replaced with dashes.
                    name = name[len("workflow_") :].replace("_", "-")
                    self.workflows[name] = fn

            for python_service in getattr(module, "SERVICES", []):
                name = python_service.name
                if name in compose["services"]:
                    raise UIError(f"service {name!r} specified more than once")
                compose["services"][name] = python_service.config

        # Add default volumes
        compose.setdefault("volumes", {}).update(
            {
                "mzdata": None,
                "pgdata": None,
                "mydata": None,
                "tmp": None,
                "secrets": None,
            }
        )

        # The CLI driver will handle acquiring these dependencies.
        if munge_services:
            self.dependencies = self._munge_services(compose["services"].items())

        # Emit the munged configuration to a temporary file so that we can later
        # pass it to Docker Compose.
        self.file = TemporaryFile(mode="w")
        os.set_inheritable(self.file.fileno(), True)
        self._write_compose()