def __init__(
    self, *, services: Optional[List[str]] = None, destroy_volumes: bool = False
) -> None:
    """Configure the step with an optional service list and a volume-destruction flag.

    Raises:
        errors.BadSpec: if `services` is provided but is not a list.
    """
    if services is None:
        services = []
    if not isinstance(services, list):
        raise errors.BadSpec(f"services should be a list, got: {services}")
    self._services = services
    self._destroy_volumes = destroy_volumes
def build_compose_file(repo: mzbuild.Repository, command: str, config_file: str) -> IO[bytes]:
    """Substitute known keys with mzbuild-provided values

    * Replace `mzimage` with fingerprinted image names

    Returns an open, inheritable temporary file containing the munged Docker
    Compose configuration, rewound to the start so it can be handed straight
    to Docker Compose.
    """
    images = []
    # Fallback tag used when no image-specific MZBUILD_<IMAGE>_TAG is set.
    # NOTE(review): the Composition loader reads MZBUILD_TAG for the same
    # purpose — confirm MZBUILD_DOCKER_TAG here is intentional.
    default = os.getenv("MZBUILD_DOCKER_TAG", None)
    with open(config_file) as f:
        compose = yaml.safe_load(f)

    # strip mzconduct top-level key, if it exists
    compose.pop("mzconduct", None)

    # Resolve every service that references an `mzbuild` image: either pin it
    # to an override tag from the environment, or queue it for a local build.
    for config in compose["services"].values():
        if "mzbuild" in config:
            image_name = config["mzbuild"]

            if image_name not in repo.images:
                raise errors.BadSpec(f"mzcompose: unknown image {image_name}")

            image = repo.images[image_name]
            override_tag = os.getenv(f"MZBUILD_{image.env_var_name()}_TAG", default)
            if override_tag is not None:
                # Pin the service to the overridden tag instead of building it.
                config["image"] = image.docker_name(override_tag)
                print(
                    f"mzcompose: warning: overriding {image_name} image to tag {override_tag}",
                    file=sys.stderr,
                )
                del config["mzbuild"]
            else:
                images.append(image)

        if "propagate-uid-gid" in config:
            # Run the container as the invoking user so bind-mounted files
            # keep sane ownership.
            config["user"] = f"{os.getuid()}:{os.getgid()}"
            del config["propagate-uid-gid"]

    deps = repo.resolve_dependencies(images)
    for d in deps:
        say(d.spec())

    # Second pass: point the remaining mzbuild services at the fingerprinted
    # image specs computed by dependency resolution.
    for config in compose["services"].values():
        if "mzbuild" in config:
            config["image"] = deps[config["mzbuild"]].spec()
            del config["mzbuild"]

    # Check if the command is going to create or start containers, and if so
    # build the dependencies. This can be slow, so we don't want to do it if we
    # can help it (e.g., for `down` or `ps`).
    if command in ["create", "run", "start", "up"]:
        deps.acquire()

    # Construct a configuration that will point Docker Compose at the correct
    # images.
    tempfile = TemporaryFile()
    os.set_inheritable(tempfile.fileno(), True)
    yaml.dump(compose, tempfile, encoding="utf-8")  # type: ignore
    tempfile.flush()
    tempfile.seek(0)
    return tempfile
def __init__(
    self, *, services: Optional[List[str]] = None, signal: Optional[str] = None
) -> None:
    """Configure the step with an optional service list and an optional signal name.

    Raises:
        errors.BadSpec: if `services` is provided but is not a list.
    """
    if services is None:
        services = []
    if not isinstance(services, list):
        raise errors.BadSpec(f"services should be a list, got: {services}")
    self._services = services
    self._signal = signal
def get_workflow(
    self, parent_env: Dict[str, str], workflow_name: str
) -> "Workflow":
    """Return sub-workflow, with env vars substituted using the supplied environment.

    Raises:
        KeyError: if the composition defines no workflows, or no workflow with
            the given name.
        errors.BadSpec: if a step cannot be constructed from its arguments.
    """
    if not self.workflows:
        raise KeyError(f"No workflows defined for composition {self.name}")
    if workflow_name not in self.workflows:
        raise KeyError(f"No workflow called {workflow_name} in {self.name}")

    # Build this workflow, performing environment substitution as necessary
    workflow_env = self.get_env(workflow_name, parent_env)
    workflow = _substitute_env_vars(self.workflows[workflow_name], workflow_env)

    built_steps = []
    for raw_step in workflow["steps"]:
        # A step could be reused over several workflows, so operate on a copy
        raw_step = raw_step.copy()

        step_name = raw_step.pop("step")
        step_ty = Steps.named(step_name)
        # Step constructors take snake_case keyword arguments; the YAML spec
        # uses dashes.
        munged = {k.replace("-", "_"): v for k, v in raw_step.items()}
        try:
            step = step_ty(**munged)
        except TypeError as e:
            a = " ".join([f"{k}={v}" for k, v in munged.items()])
            # Chain the original TypeError so the root cause stays visible in
            # the traceback.
            raise errors.BadSpec(
                f"Unable to construct {step_name} with args {a}: {e}"
            ) from e
        built_steps.append(step)

    return Workflow(workflow_name, built_steps, env=workflow_env, composition=self)
def get_env(self, workflow_name: str, parent_env: Dict[str, str]) -> Dict[str, str]:
    """Return the desired environment for a workflow.

    Raises:
        errors.BadSpec: if the workflow's `env` key is present but not a mapping.
    """
    raw_env = self.workflows[workflow_name].get("env")

    if raw_env is not None and not isinstance(raw_env, dict):
        raise errors.BadSpec(
            f"Workflow {workflow_name} has wrong type for env: "
            f"expected mapping, got {type(raw_env).__name__}: {raw_env}",
        )

    # ensure that integers (e.g. ports) are treated as env vars
    if isinstance(raw_env, dict):
        raw_env = {key: str(value) for key, value in raw_env.items()}

    # Substitute environment variables from the parent environment, allowing for the child
    # environment to inherit variables from the parent
    child_env = _substitute_env_vars(raw_env, parent_env)

    # Merge the child and parent environments, with the child environment having the tie
    # breaker. This allows for the child to decide if it wants to inherit (from the step
    # above) or override (from this step).
    merged = dict(parent_env)
    if child_env:
        merged.update(child_env)
    return merged
def __init__(self, repo: mzbuild.Repository, name: str):
    """Load composition `name` from `repo` and munge it for Docker Compose.

    Stashes raw sub-workflows on `self.workflows`, resolves `mzbuild` service
    references to concrete image specs, and writes the munged config to an
    inheritable temporary file at `self.file`.

    Raises:
        errors.UnknownComposition: if `name` is not a known composition.
        errors.BadSpec: if a service references an unknown mzbuild image.
    """
    self.name = name
    self.repo = repo
    self.images: List[mzbuild.Image] = []
    # Fallback tag used when no image-specific MZBUILD_<IMAGE>_TAG is set.
    default_tag = os.getenv("MZBUILD_TAG", None)

    if name in self.repo.compositions:
        self.path = self.repo.compositions[name]
    else:
        raise errors.UnknownComposition

    with open(self.path) as f:
        compose = yaml.safe_load(f)

    # Stash away sub workflows so that we can load them with the correct environment variables
    self.workflows = compose.pop("mzworkflows", None)

    # Resolve all services that reference an `mzbuild` image to a specific
    # `image` reference.
    for config in compose["services"].values():
        if "mzbuild" in config:
            image_name = config["mzbuild"]

            if image_name not in self.repo.images:
                raise errors.BadSpec(f"mzcompose: unknown image {image_name}")

            image = self.repo.images[image_name]
            override_tag = os.getenv(
                f"MZBUILD_{image.env_var_name()}_TAG", default_tag
            )
            if override_tag is not None:
                # Pin the service to the overridden tag instead of building it.
                config["image"] = image.docker_name(override_tag)
                print(
                    f"mzcompose: warning: overriding {image_name} image to tag {override_tag}",
                    file=sys.stderr,
                )
                del config["mzbuild"]
            else:
                self.images.append(image)

        if "propagate-uid-gid" in config:
            # Run the container as the invoking user so bind-mounted files
            # keep sane ownership.
            config["user"] = f"{os.getuid()}:{os.getgid()}"
            del config["propagate-uid-gid"]

    deps = self.repo.resolve_dependencies(self.images)
    for config in compose["services"].values():
        if "mzbuild" in config:
            config["image"] = deps[config["mzbuild"]].spec()
            del config["mzbuild"]

    # Emit the munged configuration to a temporary file so that we can later
    # pass it to Docker Compose.
    tempfile = TemporaryFile()
    os.set_inheritable(tempfile.fileno(), True)
    yaml.dump(compose, tempfile, encoding="utf-8")  # type: ignore
    tempfile.flush()
    self.file = tempfile
def parse_known_args(
    self,
    args: Optional[Sequence[Text]] = None,
    namespace: Optional[argparse.Namespace] = None,
) -> Tuple[argparse.Namespace, List[str]]:
    """Parse arguments, validating the build mode and that --mz-build-mode
    precedes the command."""
    fallback_ns = argparse.Namespace()
    try:
        parsed, extras = super().parse_known_args(args, namespace=fallback_ns)
        mode = parsed.mz_build_mode
        if mode not in ("dev", "release"):
            raise errors.BadSpec(
                f'unknown build mode {mode!r} (expected "dev" or "release")'
            )
        if "--mz-build-mode" in parsed.remainder:
            raise errors.BadSpec(
                f"--mz-build-mode must be provided before the command: '{parsed.command}'"
            )
        return (parsed, extras)
    except ValueError:
        # Parsing failed; hand back an empty namespace.
        return (fallback_ns, [])
def _alt_subst(env: Dict[str, str], match: Match) -> str:
    """Return the matched alternate value iff the matched variable is set in
    `env`, else the empty string.

    Raises:
        errors.BadSpec: if the match captured no variable name.
    """
    var = match.group("var")
    if var is None:
        raise errors.BadSpec(f"Unable to parse environment variable {match.group(0)}")

    # https://github.com/python/typeshed/issues/3902
    altvar = cast(Optional[str], match.group("alt_var"))
    assert altvar is not None, "alt var not captured by regex"

    return "" if env.get(var) is None else altvar
def _subst(match: Match) -> str:
    """Resolve one matched env-var reference against the process environment,
    falling back to the matched `:-` default, or to the original text (with a
    warning) when neither is available.

    Raises:
        errors.BadSpec: if the match captured no variable name.
    """
    var = match.group("var")
    if var is None:
        raise errors.BadSpec(f"Unable to parse environment variable {match.group(0)}")

    # https://github.com/python/typeshed/issues/3902
    default = cast(Optional[str], match.group("default"))

    env_val = os.getenv(var)
    if env_val is not None:
        return env_val
    if default is None:
        say(f"WARNING: unknown env var {var!r}")
        return cast(str, match.group(0))
    # strip the leading ":-"
    return default[2:]
def parse_known_args(
    self,
    args: Optional[Sequence[Text]] = None,
    namespace: Optional[argparse.Namespace] = None,
) -> Tuple[argparse.Namespace, List[str]]:
    """Parse arguments, rejecting build modes other than "dev" or "release"."""
    fallback_ns = argparse.Namespace()
    try:
        parsed, extras = super().parse_known_args(args, namespace=fallback_ns)
        mode = parsed.mz_build_mode
        if mode not in ("dev", "release"):
            raise errors.BadSpec(
                f'unknown build mode {mode!r} (expected "dev" or "release")'
            )
        return (parsed, extras)
    except ValueError:
        # Parsing failed; hand back an empty namespace.
        return (fallback_ns, [])
def main() -> None:
    """Entry point: build the subcommand parser and dispatch to the selected command.

    Raises:
        errors.BadSpec: if no subcommand was given on the command line.
    """
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand")
    for name, configure, run in [
        ("create", create.configure_parser, create.run),
        ("mine", mine.configure_parser, mine.run),
        ("destroy", destroy.configure_parser, destroy.run),
    ]:
        s = subparsers.add_parser(name)
        configure(s)
        s.set_defaults(run=run)

    args = parser.parse_args()
    # TODO - Pass `required=True` to parser.add_subparsers once we support 3.7
    if "run" not in args:
        raise errors.BadSpec("Must specify a command")
    args.run(args)
def __init__(self, repo: mzbuild.Repository, name: str):
    """Load composition `name` from `repo`, building its workflows and munging
    its services for Docker Compose.

    Builds `self.workflows` from the `mzworkflows` key, resolves `mzbuild`
    service references to concrete image specs, and writes the munged config
    to an inheritable temporary file at `self.file`.

    Raises:
        errors.UnknownComposition: if `name` is not a known composition.
        errors.BadSpec: if a workflow step or service spec is malformed.
    """
    self.name = name
    self.repo = repo
    self.images: List[mzbuild.Image] = []
    self.workflows: Dict[str, Workflow] = {}
    # Fallback tag used when no image-specific MZBUILD_<IMAGE>_TAG is set.
    default_tag = os.getenv("MZBUILD_TAG", None)

    if name in self.repo.compositions:
        self.path = self.repo.compositions[name]
    else:
        raise errors.UnknownComposition

    with open(self.path) as f:
        compose = yaml.safe_load(f)

    workflows = compose.pop("mzworkflows", None)
    if workflows is not None:
        # TODO: move this into the workflow so that it can use env vars that are
        # manually defined.
        workflows = _substitute_env_vars(workflows)
        for workflow_name, raw_w in workflows.items():
            built_steps = []
            for raw_step in raw_w["steps"]:
                # Operate on a copy so that a step dict shared between
                # workflows (e.g. via a YAML alias) is not mutated by pop().
                raw_step = raw_step.copy()
                step_name = raw_step.pop("step")
                step_ty = Steps.named(step_name)
                # Step constructors take snake_case keyword arguments; the
                # YAML spec uses dashes.
                munged = {k.replace("-", "_"): v for k, v in raw_step.items()}
                try:
                    step = step_ty(**munged)
                except TypeError as e:
                    a = " ".join([f"{k}={v}" for k, v in munged.items()])
                    # Chain the original TypeError so the root cause stays
                    # visible in the traceback.
                    raise errors.BadSpec(
                        f"Unable to construct {step_name} with args {a}: {e}"
                    ) from e
                built_steps.append(step)

            env = raw_w.get("env")
            if not isinstance(env, dict) and env is not None:
                raise errors.BadSpec(
                    f"Workflow {workflow_name} has wrong type for env: "
                    f"expected mapping, got {type(env).__name__}: {env}",
                )
            # ensure that integers (e.g. ports) are treated as env vars
            if isinstance(env, dict):
                env = {k: str(v) for k, v in env.items()}

            self.workflows[workflow_name] = Workflow(
                workflow_name, built_steps, env=env, composition=self
            )

    # Resolve all services that reference an `mzbuild` image to a specific
    # `image` reference.
    for config in compose["services"].values():
        if "mzbuild" in config:
            image_name = config["mzbuild"]

            if image_name not in self.repo.images:
                raise errors.BadSpec(f"mzcompose: unknown image {image_name}")

            image = self.repo.images[image_name]
            override_tag = os.getenv(
                f"MZBUILD_{image.env_var_name()}_TAG", default_tag
            )
            if override_tag is not None:
                # Pin the service to the overridden tag instead of building it.
                config["image"] = image.docker_name(override_tag)
                print(
                    f"mzcompose: warning: overriding {image_name} image to tag {override_tag}",
                    file=sys.stderr,
                )
                del config["mzbuild"]
            else:
                self.images.append(image)

        if "propagate-uid-gid" in config:
            # Run the container as the invoking user so bind-mounted files
            # keep sane ownership.
            config["user"] = f"{os.getuid()}:{os.getgid()}"
            del config["propagate-uid-gid"]

    deps = self.repo.resolve_dependencies(self.images)
    for config in compose["services"].values():
        if "mzbuild" in config:
            config["image"] = deps[config["mzbuild"]].spec()
            del config["mzbuild"]

    # Emit the munged configuration to a temporary file so that we can later
    # pass it to Docker Compose.
    tempfile = TemporaryFile()
    os.set_inheritable(tempfile.fileno(), True)
    yaml.dump(compose, tempfile, encoding="utf-8")  # type: ignore
    tempfile.flush()
    self.file = tempfile