Example #1
    def validate(self):
        semver.parse(self.release_version)
        existing_dir(self.repo_dir)

        # check whether a release with the given version exists
        try:
            self.github_helper.repository.release_from_tag(
                self.release_version)
        except NotFoundError:
            raise RuntimeError(
                f'No release with tag {self.release_version} found')
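All examples in this section call a shared helper, existing_dir. Its implementation is not shown here; judging from its call sites (it accepts both str and pathlib.Path arguments and its return value is passed on), it appears to assert that the given path is an existing directory and hand it back, failing otherwise. A minimal sketch of that assumed contract:

import os

def existing_dir(path):
    # Assumed behaviour, inferred from the call sites in these examples:
    # fail fast if the path is not an existing directory, otherwise
    # return the argument unchanged (str or pathlib.Path).
    if not os.path.isdir(path):
        raise ValueError(f'not an existing directory: {path}')
    return path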
Example #2
def export_kubeconfig(
    kubernetes_config_name: str,
    output_file: str,
):
    '''Write the kubeconfig contained in a kubernetes config to a given path.
    '''
    cfg_factory = ctx().cfg_factory()
    kubernetes_cfg = cfg_factory.kubernetes(kubernetes_config_name)

    destination_path = pathlib.Path(output_file).resolve()
    existing_dir(destination_path.parent)

    with destination_path.open(mode='w') as f:
        yaml.dump(kubernetes_cfg.kubeconfig(), f)
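A call to this function could look like the following; the config entry name and output path are purely illustrative:

export_kubeconfig(
    kubernetes_config_name='my-kubernetes-config',  # hypothetical config entry name
    output_file='/tmp/kubeconfig.yaml',             # hypothetical destination file
)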
Example #3
    @staticmethod
    def from_cfg_dir(cfg_dir: str, cfg_types_file='config_types.yaml'):
        cfg_dir = existing_dir(os.path.abspath(cfg_dir))
        cfg_types_dict = parse_yaml_file(os.path.join(cfg_dir, cfg_types_file))
        raw = {}

        raw[ConfigFactory.CFG_TYPES] = cfg_types_dict

        def parse_cfg(cfg_type):
            # assume for now that there is exactly one cfg source (file)
            cfg_sources = list(cfg_type.sources())
            if not len(cfg_sources) == 1:
                raise ValueError(
                    'currently, only exactly one cfg file is supported per type'
                )

            cfg_file = cfg_sources[0].file()
            parsed_cfg = parse_yaml_file(os.path.join(cfg_dir, cfg_file))
            return parsed_cfg

        # parse all configurations
        for cfg_type in map(ConfigType, cfg_types_dict.values()):
            cfg_name = cfg_type.cfg_type_name()
            raw[cfg_name] = parse_cfg(cfg_type)

        return ConfigFactory(raw_dict=raw)
Example #4
    def validate(self):
        existing_dir(self.repo_dir)
        semver.parse(self.release_version)

        if self.release_commit_callback:
            existing_file(self.release_commit_callback)
        if self.next_version_callback:
            existing_file(self.next_version_callback)

        existing_file(self.repository_version_file_path)

        # perform version ops once to validate args
        self._calculate_next_cycle_dev_version(
            release_version=self.release_version,
            version_operation=self.version_operation,
            prerelease_suffix=self.prerelease_suffix,
        )
Example #5
def _cfg_factory_from_dir():
    if Config.CONTEXT.value.config_dir() is None:
        return None

    from util import existing_dir
    cfg_dir = existing_dir(Config.CONTEXT.value.config_dir())

    from model import ConfigFactory
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    return factory
Example #6
    def enumerate_local_repo(self, repo_dir: str):
        repo_dir = existing_dir(Path(repo_dir))
        if not repo_dir.joinpath('.git').is_dir():
            raise ValueError('not a git root directory: {r}'.format(r=repo_dir))

        for path in self.CODEOWNERS_PATHS:
            codeowners_file = repo_dir.joinpath(path)
            if codeowners_file.is_file():
                with open(codeowners_file) as f:
                    yield from self._filter_codeowners_entries(f.readlines())
Example #7
def determine_head_commit_recipients(src_dirs=()):
    '''returns a generator yielding e-mail addresses from the head commit's author and
    committer for all given repository work trees.
    '''
    for src_dir in src_dirs:
        # committer/author from head commit
        repo = git.Repo(existing_dir(src_dir))
        head_commit = repo.commit(repo.head)
        yield head_commit.author.email.lower()
        yield head_commit.committer.email.lower()
Example #8
def _metadata_dict():
    # XXX mv to concourse package; deduplicate with notify step
    if not util._running_on_ci():
        return {}

    # XXX do not hard-code meta-dir
    meta_dir = util.existing_dir(
        os.path.join(util._root_dir(), os.environ.get('META')))

    attrs = (
        'atc-external-url',
        'build-team-name',
        'build-pipeline-name',
        'build-job-name',
        'build-name',
    )

    def read_attr(name):
        with open(os.path.join(meta_dir, name)) as f:
            return f.read().strip()

    meta_dict = {name: read_attr(name) for name in attrs}

    # XXX deduplicate; mv to concourse package
    meta_dict['concourse_url'] = util.urljoin(
        meta_dict['atc-external-url'],
        'teams',
        meta_dict['build-team-name'],
        'pipelines',
        meta_dict['build-pipeline-name'],
        'jobs',
        meta_dict['build-job-name'],
        'builds',
        meta_dict['build-name'],
    )

    # XXX do not hard-code env variables
    meta_dict['effective_version'] = os.environ.get('EFFECTIVE_VERSION')
    meta_dict['component_name'] = os.environ.get('COMPONENT_NAME')
    meta_dict['creation_date'] = datetime.datetime.now().isoformat()

    return meta_dict
Example #9
    def __init__(self, base_dir):
        self.base_dir = existing_dir(base_dir)
Example #10
def deploy_or_upgrade_landscape(
    config_set_name: CliHint(typehint=str, help=CONFIG_SET_HELP),
    components: CliHint(
        type=LandscapeComponent,
        typehint=[LandscapeComponent],
        choices=[component for component in LandscapeComponent],
        help=(
            "list of components to deploy. "
            "By default, ALL components will be deployed."
        ),
    ) = None,
    webhook_dispatcher_chart_dir: CliHint(
        typehint=str,
        help="directory of webhook dispatcher chart",
    ) = None,
    concourse_deployment_name: CliHint(
        typehint=str,
        help="namespace and deployment name for Concourse") = 'concourse',
    timeout_seconds: CliHint(
        typehint=int, help="how long to wait for concourse startup") = 180,
    webhook_dispatcher_deployment_name: str = 'webhook-dispatcher',
    dry_run: bool = True,
):
    '''Deploys the given components of the Concourse landscape.
    '''
    # handle default (all known components)
    if not components:
        components = [component for component in LandscapeComponent]
    # Validate
    if LandscapeComponent.WHD in components:
        if not webhook_dispatcher_chart_dir:
            raise ValueError(
                f"--webhook-dispatcher-chart-dir must be given if component "
                f"'{LandscapeComponent.WHD.value}' is to be deployed.")
        else:
            webhook_dispatcher_chart_dir = existing_dir(
                webhook_dispatcher_chart_dir)

    _display_info(
        dry_run=dry_run,
        operation="DEPLOYED",
        deployment_name=concourse_deployment_name,
        components=components,
    )

    if dry_run:
        return

    cfg_factory = ctx().cfg_factory()
    config_set = cfg_factory.cfg_set(config_set_name)
    concourse_cfg = config_set.concourse()

    # Set the global kubernetes cluster context to the cluster specified in the ConcourseConfig
    kubernetes_config_name = concourse_cfg.kubernetes_cluster_config()
    kubernetes_cfg = cfg_factory.kubernetes(kubernetes_config_name)
    kube_ctx.set_kubecfg(kubernetes_cfg.kubeconfig())
    ensure_cluster_version(kubernetes_cfg)

    if LandscapeComponent.SECRETS_SERVER in components:
        info('Deploying Secrets Server')
        deploy_secrets_server(config_set_name=config_set_name)

    if LandscapeComponent.CONCOURSE in components:
        info('Deploying Concourse')
        deploy_or_upgrade_concourse(
            config_set_name=config_set_name,
            deployment_name=concourse_deployment_name,
            timeout_seconds=timeout_seconds,
        )

    if LandscapeComponent.WHD in components:
        info('Deploying Webhook Dispatcher')
        deploy_or_upgrade_webhook_dispatcher(
            config_set_name=config_set_name,
            chart_dir=webhook_dispatcher_chart_dir,
            deployment_name=webhook_dispatcher_deployment_name,
        )

    if LandscapeComponent.MONITORING in components:
        info('Deploying Monitoring stack')
        deploy_or_upgrade_monitoring(config_set_name=config_set_name)

    if LandscapeComponent.CLAMAV in components:
        info('Deploying ClamAV')
        deploy_or_upgrade_clamav(config_set_name=config_set_name)
Example #11
def update_submodule(
    repo_path: str,
    tree_ish: str,
    submodule_path: str,
    commit_hash: str,
    author: str,
    email: str,
):
    '''Update the submodule of a git-repository to a specific commit.

    Create a new commit, with the passed tree-ish as parent, in the given repository.

    Note that this implementation only supports toplevel submodules. To be removed in a
    future version.

    Parameters
    ------
    repo_path : str
        Path to a directory containing an initialised git-repo with a submodule to update.
    tree_ish : str
        Valid tree-ish to use as base for creating the new commit. Used as parent for the
        commit to be created
        Example: 'master' for the head of the master-branch.
    submodule_path : str
        Path (relative to the repository root) to the submodule. Must be immediately below the root
        of the repository.
    commit_hash : str
        The hash the submodule should point to in the created commit. This should be a valid
        commit-hash in the submodule's repository.
    author : str
        Will be set as author of the created commit
    email : str
        Will be set as the e-mail address of the author of the created commit

    Returns
    ------
    str
        The hexadecimal SHA-1 hash of the created commit
    '''
    repo_path = existing_dir(os.path.abspath(repo_path))

    not_empty(submodule_path)
    if '/' in submodule_path:
        fail('This implementation only supports toplevel submodules: {s}'.
             format(s=submodule_path))

    not_empty(tree_ish)
    not_empty(commit_hash)
    not_empty(author)
    not_empty(email)

    repo = git.Repo(repo_path)
    _ensure_submodule_exists(repo, submodule_path)

    # Create mk-tree-parseable string-representation from given tree-ish.
    tree = repo.tree(tree_ish)
    tree_representation = _serialise_and_update_submodule(
        tree, submodule_path, commit_hash)

    # Pass the patched tree to git mk-tree using GitPython. We cannot do this in GitPython
    # directly as it does not support arbitrary tree manipulation.
    # We must keep a reference to auto_interrupt as it closes all streams to the subprocess
    # on finalisation
    auto_interrupt = repo.git.mktree(istream=subprocess.PIPE, as_process=True)
    process = auto_interrupt.proc
    stdout, _ = process.communicate(input=tree_representation.encode())

    # returned string is byte-encoded and newline-terminated
    new_sha = stdout.decode('utf-8').strip()

    # Create a new commit in the repo's object database from the newly created tree.
    actor = git.Actor(author, email)
    parent_commit = repo.commit(tree_ish)
    commit = git.Commit.create_from_tree(
        repo=repo,
        tree=new_sha,
        parent_commits=[parent_commit],
        message='Upgrade submodule {s} to commit {c}'.format(
            s=submodule_path,
            c=commit_hash,
        ),
        author=actor,
        committer=actor,
    )

    return commit.hexsha
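For illustration, a call might look as follows; every value is hypothetical and only meant to show the expected argument shapes:

new_commit_sha = update_submodule(
    repo_path='/path/to/parent/repo',  # hypothetical repository checkout
    tree_ish='master',                 # branch head to use as parent commit
    submodule_path='my-submodule',     # toplevel submodule directory (hypothetical)
    commit_hash='0123456789abcdef0123456789abcdef01234567',  # hypothetical target commit
    author='Jane Doe',
    email='jane.doe@example.com',
)
print(new_commit_sha)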