def test_fail():
    '''examinee.fail must raise Failure, print the message to stderr, and keep stdout empty.'''
    with capture_out() as (stdout, stderr):
        with pytest.raises(Failure):
            examinee.fail(msg='foo bar')
    captured_err = stderr.getvalue().strip()
    captured_out = stdout.getvalue()
    assert captured_err == 'ERROR: foo bar'
    assert len(captured_out) == 0
def calculate_range(
    repository_branch: str,
    git_helper: GitHelper,
    github_helper: GitHubRepositoryHelper,
) -> str:
    '''Return the commit range "<start>..<end>" used to gather release notes.

    Start is the greatest release tag reachable from the branch head; end is a
    `git describe` of the branch head (falling back to its commit hash if the
    repository cannot be described, e.g. because it has no tags).
    '''
    repo = git_helper.repo
    branch_head = git_helper.fetch_head(ref=repository_branch)
    if not branch_head:
        # consistency: use f-strings as the sibling implementation does
        fail(f'could not determine branch head of {repository_branch} branch')
    range_start = _.head(
        reachable_release_tags_from_commit(github_helper, repo, branch_head))

    try:
        # better readable range_end by describing head commit
        range_end = repo.git.describe(branch_head, tags=True)
    except GitError as err:
        warning(
            'failed to describe branch head, maybe the repository has no tags? '
            f'GitError: {err}. Falling back to branch head commit hash.')
        range_end = branch_head.hexsha

    return f'{range_start}..{range_end}'
def calculate_range(
    self,
    repository_branch: str,
) -> str:
    '''Determine the commit range "<start>..<end>" for collecting release notes.

    Start is the greatest reachable release tag of the branch head; end is a
    readable `git describe` of the head (commit hash if describing fails).
    '''
    repo = self.git_helper.repo
    branch_head = self.git_helper.fetch_head(ref=repository_branch)
    if not branch_head:
        fail(
            f'could not determine branch head of {repository_branch} branch'
        )

    release_tags = self.reachable_release_tags_from_commit(
        repo=repo,
        commit=branch_head,
    )
    range_start = _.head(release_tags)

    try:
        # better readable range_end by describing head commit
        range_end = repo.git.describe(branch_head, tags=True)
    except GitError as err:
        logger.warning(
            'failed to describe branch head, maybe the repository has no tags? '
            f'GitError: {err}. Falling back to branch head commit hash.')
        range_end = branch_head.hexsha

    return f'{range_start}..{range_end}'
def template_file(self, template_name):
    '''Return the path of the template file for `template_name`.

    Searches (recursively) below every directory in self.template_path and
    returns the first match; fails if no such file exists.
    '''
    # TODO: do not hard-code file name extension
    wanted_file_name = template_name + '.mako'
    for search_root in self.template_path:
        for dirpath, _, filenames in os.walk(search_root):
            if wanted_file_name not in filenames:
                continue
            return os.path.join(dirpath, wanted_file_name)
    fail('could not find template {t}, tried in {p}'.format(
        t=str(template_name),
        p=','.join(map(str, self.template_path))))
def patch_deployment(self, name: str, namespace: str, body: dict):
    '''Patches a deployment with a given name in the given namespace.

    Fails with a clear message if the deployment does not exist.
    '''
    for argument in (name, namespace, body):
        not_empty(argument)
    if not self.get_deployment(namespace, name):
        fail(f'Deployment {name} in namespace {namespace} does not exist')
    self.apps_api.patch_namespaced_deployment(name, namespace, body)
def diff_pipelines(left_file: CliHints.yaml_file(), right_file: CliHints.yaml_file()):
    '''Deep-diff two yaml documents (ignoring ordering); fail if they differ.'''
    from deepdiff import DeepDiff
    from pprint import pprint

    differences = DeepDiff(left_file, right_file, ignore_order=True)
    if not differences:
        info('the yaml documents are equivalent')
        return
    pprint(differences)
    fail('diffs were found')
def cfg_factory():
    '''Return a cfg-factory, preferring a local cfg-dir over the secrets-server.'''
    from ci.util import fail

    # try cfg-dir first, fall back to secrets-server
    factory = _cfg_factory_from_dir() or _cfg_factory_from_secrets_server()
    if not factory:
        fail('cfg_factory is required. configure using the global --cfg-dir option or via env')

    return factory
def get_kubecfg(self):
    '''Return a kubernetes API client, or load a kube-config file.

    If an explicit kubeconfig configuration object was set on this instance, an
    ApiClient built from it is returned directly. Otherwise a kubeconfig file
    path is determined (a `--kubeconfig` CLI argument overrules the KUBECONFIG
    env var) and loaded.
    '''
    if self.kubeconfig:
        return kubernetes.client.ApiClient(configuration=self.kubeconfig)

    kubeconfig = os.environ.get('KUBECONFIG', None)
    args = global_ctx().args
    # kubeconfig CLI argument overrules the env var
    if args and hasattr(args, 'kubeconfig') and args.kubeconfig:
        kubeconfig = args.kubeconfig
    # NOTE: removed an unreachable `if self.kubeconfig:` re-assignment here —
    # the early return above already handled a truthy self.kubeconfig
    if not kubeconfig:
        fail('KUBECONFIG env var must be set')
    return config.load_kube_config(existing_file(kubeconfig))
def send_mail(
    email_cfg_name: CliHint(
        help="reference to an email cfg (see repo cc-config / secrets-server)"
    ),
    recipients: CliHint(typehint=[str], help="Recipient email address"),
    mail_template_file: CliHints.existing_file(),
    subject: CliHint(help="email subject"),
    cc_recipients: CliHint(typehint=[str], help="Carbon copy email address") = [],
    replace_token: CliHint(typehint=[str], help="<key>=<value> (replace <key> in body)") = [],
):
    '''
    Sends an email using the specified email_cfg (retrieved from a cfg_factory) to the specified
    recipients. The mail body is read from a file. A simple token-replacement is done if
    (optional) replace-tokens are given.

    @param recipients: mail recipients (email addresses)
    @param mail_template_file: path to the mail template file. Must exist.
    @param subject: email subject
    @param cc_recipients: cc mail recipients
    @param replace_token: format: <token>=<replace-value> - tokens in mail-body are replaced
    '''
    not_empty(email_cfg_name)

    cfg_factory = ctx().cfg_factory()
    email_cfg = cfg_factory.email(email_cfg_name)

    with open(mail_template_file) as f:
        mail_template = f.read()

    # validate template-tokens
    # BUGFIX: materialise into a list — the previous lazy filter-iterator was
    # exhausted by the len()-check, so the error message never contained the
    # offending tokens; also stringify them so join cannot fail on non-strings
    invalid_tokens = [
        token for token in replace_token
        if not isinstance(token, str) or '=' not in token
    ]
    if invalid_tokens:
        fail('all replace-tokens must be of form <key>=<value>: ' + ' '.join(
            map(str, invalid_tokens)))

    # parse replace-tokens (split at the first '=' so values may contain '=')
    replace_tokens = dict(token.split('=', 1) for token in replace_token)

    _send_mail(
        email_cfg=email_cfg,
        recipients=recipients,
        mail_template=mail_template,
        subject=subject,
        cc_recipients=cc_recipients,
        replace_tokens=replace_tokens,
    )
def add_dependencies(
    descriptor_src_file: CliHints.existing_file(),
    component_name: str,
    component_version: str,
    descriptor_out_file: str=None,
    component_dependencies: CliHint(action='append')=[],
    container_image_dependencies: CliHint(action='append')=[],
    web_dependencies: CliHint(action='append')=[],
    generic_dependencies: CliHint(action='append')=[],
    validation_policies: CliHint(
        type=ValidationPolicy,
        typehint=[ValidationPolicy],
        choices=[policy for policy in ValidationPolicy],
    )=[],
):
    '''Add dependencies to a component inside a component-descriptor file.

    The updated descriptor is printed to stdout, or written to
    `descriptor_out_file` if given.
    '''
    product = ComponentDescriptor.from_dict(parse_yaml_file(descriptor_src_file))
    component = product.component(
        ComponentReference.create(name=component_name, version=component_version)
    )
    if not component:
        fail('component {c}:{v} was not found in {f}'.format(
            c=component_name, v=component_version, f=descriptor_src_file
        ))

    # maintain old behaviour
    if not validation_policies:
        validation_policies = [ValidationPolicy.FORBID_EXTRA_ATTRIBUTES]

    dependencies = _parse_dependencies(
        component_dependencies=component_dependencies,
        container_image_dependencies=container_image_dependencies,
        web_dependencies=web_dependencies,
        generic_dependencies=generic_dependencies,
        validation_policies=validation_policies,
    )
    component.add_dependencies(dependencies)

    # BUGFIX: removed a leftover debug-print of the raw product dict here — it
    # caused the descriptor to be emitted twice (and even when writing to file)
    # round-trip through json to strip custom types before yaml-serialisation
    product_dict = json.loads(json.dumps({'components': [component.raw]}))
    if not descriptor_out_file:
        print(yaml.dump(product_dict, indent=2))
    else:
        with open(descriptor_out_file, 'w') as f:
            yaml.dump(product_dict, f, indent=2)
def merge_descriptors(descriptors: [str]):
    '''Merge two or more component-descriptor files; print the merged result as yaml.'''
    if len(descriptors) < 2:
        fail('at least two descriptors are required for merging')

    def parse_product_file(f):
        return ComponentDescriptor.from_dict(parse_yaml_file(f))

    first_file, *remaining_files = descriptors
    merged = parse_product_file(first_file)
    for descriptor_file in remaining_files:
        merged = merge_products(merged, parse_product_file(descriptor_file))

    # workaround snd-issues (TODO: remove snd)
    cleansed_dict = json.loads(json.dumps(merged.raw))

    print(yaml.dump(cleansed_dict, indent=2))
def ensure_cluster_version(kubernetes_config: KubernetesConfig):
    '''Fail unless the cluster's major/minor version matches the configured one.'''
    not_none(kubernetes_config)

    actual = get_cluster_version_info()
    expected = kubernetes_config.cluster_version()

    matches = (
        actual.major == expected['major']
        and actual.minor == expected['minor']
    )
    if matches:
        return
    fail(
        'cluster version mismatch "Major: {a_major} Minor: '
        '{a_minor}". Expected "Major: {e_major} Minor: {e_minor}".'.format(
            a_major=actual.major,
            a_minor=actual.minor,
            e_major=expected['major'],
            e_minor=expected['minor'],
        ))
def retrieve_component_descriptor(
    name: str,
    version: str,
):
    '''Resolve the raw component descriptor for name:version and print it.'''
    resolver = ComponentDescriptorResolver(
        cfg_factory=ctx().cfg_factory(),
    )

    reference = ComponentReference.create(name=name, version=version)
    try:
        resolved_descriptor = resolver.retrieve_raw_descriptor(reference)
    except github3.exceptions.NotFoundError:
        fail('no component descriptor found: {n}:{v}'.format(n=name, v=version))

    print(resolved_descriptor)
def reachable_release_tags_from_commit(
        self, repo: git.Repo, commit: git.objects.Commit) -> typing.List[str]:
    '''Returns a list of release-tags whose tagged commits are ancestors of the given commit.

    The returned list is sorted in descending order, putting the greatest reachable tag first.
    If no release-tag is reachable, the repository's root commit hash is returned as the single
    (fallback) entry; multiple root commits cause a hard failure.
    '''
    # keyed by commit hexsha (see lookup below); values are the release tags
    tags = self.release_tags()

    # breadth-first search over the commit's ancestry, collecting tags of
    # every visited commit
    visited = set()
    queue = list()
    queue.append(commit)
    visited.add(commit.hexsha)
    reachable_tags = list()
    while queue:
        commit = queue.pop(0)
        if commit.hexsha in tags:
            reachable_tags.append(tags[commit.hexsha])
        not_visited_parents = _.filter(
            commit.parents,
            lambda parent_commit: parent_commit.hexsha not in visited)
        if not_visited_parents:
            queue.extend(not_visited_parents)
            visited |= set(
                _.map(not_visited_parents, lambda commit: commit.hexsha))

    # greatest semver first
    reachable_tags.sort(key=lambda t: version.parse_to_semver(t), reverse=True)

    if not reachable_tags:
        logger.warning('no release tag found, falling back to root commit')
        # NOTE(review): `commit` here is the BFS loop variable (the last commit
        # visited above), not the original argument — presumably both share the
        # same root commit; confirm this is intended.
        root_commits = repo.iter_commits(rev=commit, max_parents=0)
        root_commit = next(root_commits, None)
        if not root_commit:
            fail(
                f'could not determine root commit from rev {commit.hexsha}'
            )
        if next(root_commits, None):
            fail(
                'cannot determine range for release notes. Repository has multiple root '
                'commits. Specify range via commit_range parameter.')
        reachable_tags.append(root_commit.hexsha)

    return reachable_tags
def send_mail(
    email_cfg_name: str,
    recipients: typing.Iterable[str],
    mail_template_file: str,
    subject: str,
    cc_recipients: typing.Iterable[str],
    replace_token: typing.Iterable[str],
):
    '''
    Sends an email using the specified email_cfg (retrieved from a cfg_factory) to the specified
    recipients. The mail body is read from a file. A simple token-replacement is done if
    (optional) replace-tokens are given.

    @param recipients: mail recipients (email addresses)
    @param mail_template_file: path to the mail template file. Must exist.
    @param subject: email subject
    @param cc_recipients: cc mail recipients
    @param replace_token: format: <token>=<replace-value> - tokens in mail-body are replaced
    '''
    not_empty(email_cfg_name)

    cfg_factory = ctx().cfg_factory()
    email_cfg = cfg_factory.email(email_cfg_name)

    with open(mail_template_file) as f:
        mail_template = f.read()

    # tolerate one-shot iterables: replace_token is iterated twice below
    replace_token = list(replace_token)

    # validate template-tokens
    # BUGFIX: materialise into a list — the previous lazy filter-iterator was
    # exhausted by the len()-check, so the error message never contained the
    # offending tokens; also stringify them so join cannot fail on non-strings
    invalid_tokens = [
        token for token in replace_token
        if not isinstance(token, str) or '=' not in token
    ]
    if invalid_tokens:
        fail('all replace-tokens must be of form <key>=<value>: ' + ' '.join(
            map(str, invalid_tokens)))

    # parse replace-tokens (split at the first '=' so values may contain '=')
    replace_tokens = dict(token.split('=', 1) for token in replace_token)

    _send_mail(
        email_cfg=email_cfg,
        recipients=recipients,
        mail_template=mail_template,
        subject=subject,
        cc_recipients=cc_recipients,
        replace_tokens=replace_tokens,
    )
def reachable_release_tags_from_commit(github_helper: GitHubRepositoryHelper,
                                       repo: git.Repo, commit: git.objects.Commit) -> [str]:
    '''Returns a list of release-tags whose tagged commits are ancestors of the given commit.

    The returned list is sorted in descending order (greatest tag first). If no
    release-tag is reachable, the repository's root commit hash is returned as
    the single fallback entry; multiple root commits cause a hard failure.
    '''
    # keyed by commit hexsha (see lookup below); values are the release tags
    tags = release_tags(github_helper, repo)

    # breadth-first search over the commit's ancestry, collecting tags of
    # every visited commit
    visited = set()
    queue = list()
    queue.append(commit)
    visited.add(commit.hexsha)
    reachable_tags = list()
    while queue:
        commit = queue.pop(0)
        if commit.hexsha in tags:
            reachable_tags.append(tags[commit.hexsha])
        not_visited_parents = _.filter(
            commit.parents,
            lambda parent_commit: parent_commit.hexsha not in visited)
        if not_visited_parents:
            queue.extend(not_visited_parents)
            visited |= set(
                _.map(not_visited_parents, lambda commit: commit.hexsha))

    # greatest semver first
    reachable_tags.sort(key=lambda t: version.parse_to_semver(t), reverse=True)

    if not reachable_tags:
        warning('no release tag found, falling back to root commit')
        # NOTE(review): `commit` here is the BFS loop variable (the last commit
        # visited above), not the original argument — presumably both share the
        # same root commit; confirm this is intended.
        root_commits = repo.iter_commits(rev=commit, max_parents=0)
        root_commit = next(root_commits, None)
        if not root_commit:
            fail('could not determine root commit from rev {rev}'.format(
                rev=commit.hexsha))
        if next(root_commits, None):
            fail(
                'cannot determine range for release notes. Repository has multiple root commits. '
                'Specify range via commit_range parameter.')
        reachable_tags.append(root_commit.hexsha)

    return reachable_tags
def add_dependencies(
    descriptor_src_file: CliHints.existing_file(),
    component_name: str,
    component_version: str,
    descriptor_out_file: str = None,
    component_dependencies: CliHint(typehint=_parse_component_deps, action='append') = [],
    container_image_dependencies: CliHint(typehint=_parse_container_image_deps, action='append') = [],
    web_dependencies: CliHint(typehint=_parse_web_deps, action='append') = [],
    generic_dependencies: CliHint(typehint=_parse_generic_deps, action='append') = [],
):
    '''Add the given dependencies to a component inside a component-descriptor file.

    The updated descriptor is printed to stdout, or written to `descriptor_out_file` if given.
    '''
    product = ComponentDescriptor.from_dict(
        parse_yaml_file(descriptor_src_file))
    component = product.component(
        ComponentReference.create(name=component_name, version=component_version))
    if not component:
        fail('component {c}:{v} was not found in {f}'.format(
            c=component_name, v=component_version, f=descriptor_src_file))

    component_deps = component.dependencies()

    # dispatch each dependency kind to its corresponding adder
    for add_dependency, dependencies in (
        (component_deps.add_component_dependency, component_dependencies),
        (component_deps.add_container_image_dependency, container_image_dependencies),
        (component_deps.add_web_dependency, web_dependencies),
        (component_deps.add_generic_dependency, generic_dependencies),
    ):
        for dependency in dependencies:
            add_dependency(dependency)

    # round-trip through json to strip custom types before yaml-serialisation
    product_dict = json.loads(json.dumps({'components': [component.raw]}))
    if not descriptor_out_file:
        print(yaml.dump(product_dict, indent=2))
    else:
        with open(descriptor_out_file, 'w') as f:
            yaml.dump(product_dict, f, indent=2)
def deploy_and_run_smoketest_pipeline(
    config_dir: str,
    config_name: str,
    concourse_team_name: str,
    cc_pipelines_repo_dir: str,
    cc_utils_repo_dir: str,
    wait_for_job_execution: bool=False,
):
    '''Render and deploy the hard-coded 'cc-smoketest' pipeline to the given concourse team.

    The pipeline definition is fetched from the kubernetes/cc-smoketest github
    repository, preprocessed, rendered with the templates from the cc-utils
    repository, and deployed (unpaused and exposed) to the target concourse.

    NOTE(review): `wait_for_job_execution` and `cc_pipelines_repo_dir` are not
    used anywhere in this body — presumably leftovers or consumed elsewhere;
    confirm before relying on them.
    '''
    config_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    config_set = config_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()

    # as this is an integration test, hard-code assumptions about the layout of
    # our pipelines repository
    template_path = os.path.join(cc_utils_repo_dir, 'concourse', 'templates')
    template_include_dir = os.path.join(cc_utils_repo_dir, 'concourse')
    pipeline_name = 'cc-smoketest'

    # retrieve pipeline-definition from github at hardcoded location
    github_cfg = config_set.github()
    githubrepobranch = GitHubRepoBranch(
        github_config=github_cfg,
        repo_owner='kubernetes',
        repo_name='cc-smoketest',
        branch='master',
    )
    helper = GitHubRepositoryHelper.from_githubrepobranch(
        githubrepobranch=githubrepobranch,
    )
    pipeline_definition = yaml.load(
        helper.retrieve_text_file_contents(
            file_path='.ci/smoketest-pipeline.yaml',
        ),
        Loader=yaml.SafeLoader,
    )

    definition_descriptor = DefinitionDescriptor(
        pipeline_name=pipeline_name,
        pipeline_definition=pipeline_definition[pipeline_name],
        main_repo={'path': 'kubernetes/cc-smoketest', 'branch': 'master'},
        concourse_target_cfg=concourse_cfg,
        concourse_target_team=concourse_team_name,
    )

    preprocessor = DefinitionDescriptorPreprocessor()
    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )
    deployer = ConcourseDeployer(
        unpause_pipelines=True,
        expose_pipelines=True
    )

    # preprocess -> render -> deploy; fail hard on unsuccessful deployment
    definition_descriptor = preprocessor.process_definition_descriptor(definition_descriptor)
    rendering_result = renderer.render(definition_descriptor)

    info('deploying pipeline')
    deployment_result = deployer.deploy(rendering_result.definition_descriptor)

    if not deployment_result.deploy_status & DeployStatus.SUCCEEDED:
        fail('deployment failed')
def deploy_concourse_landscape( config_set: ConfigurationSet, deployment_name: str = 'concourse', timeout_seconds: int = 180, ): ensure_helm_setup() # Fetch all the necessary config config_factory = global_ctx().cfg_factory() concourse_cfg = config_set.concourse() # Kubernetes cluster config kubernetes_config_name = concourse_cfg.kubernetes_cluster_config() kubernetes_config = config_factory.kubernetes(kubernetes_config_name) # Container-registry config image_pull_secret_name = concourse_cfg.image_pull_secret() container_registry = config_factory.container_registry( image_pull_secret_name) cr_credentials = container_registry.credentials() # Helm config helm_chart_default_values_name = concourse_cfg.helm_chart_default_values_config( ) default_helm_values = config_factory.concourse_helmchart( helm_chart_default_values_name).raw helm_chart_values_name = concourse_cfg.helm_chart_values() custom_helm_values = config_factory.concourse_helmchart( helm_chart_values_name).raw # Proxy config if concourse_cfg.proxy(): proxy_cfg_name = concourse_cfg.proxy() proxy_cfg = config_factory.proxy(proxy_cfg_name) info('Creating config-maps for the mitm proxy ...') create_proxy_configmaps( proxy_cfg=proxy_cfg, namespace=deployment_name, ) info('Creating default image-pull-secret ...') create_image_pull_secret( credentials=cr_credentials, image_pull_secret_name=image_pull_secret_name, namespace=deployment_name, ) warning( 'Teams will not be set up properly on Concourse if the deployment times out, ' 'even if Helm eventually succeeds. In this case, run the deployment command again after ' 'Concourse is available.') instance_specific_helm_values = create_instance_specific_helm_values( concourse_cfg=concourse_cfg, config_factory=config_factory, ) chart_version = concourse_cfg.helm_chart_version() # Add proxy sidecars to instance specific values. 
# NOTE: Only works for helm chart version 3.8.0 or greater if concourse_cfg.proxy(): chart_version_semver = version.parse_to_semver( concourse_cfg.helm_chart_version()) min_version = version.parse_to_semver('3.8.0') if chart_version_semver >= min_version: instance_specific_helm_values = add_proxy_values( config_set=config_set, instance_specific_values=instance_specific_helm_values, ) else: fail( 'Proxy deployment requires the configured helm chart version to be at least 3.8.0' ) execute_helm_deployment( kubernetes_config, deployment_name, 'concourse/concourse', deployment_name, default_helm_values, custom_helm_values, instance_specific_helm_values, chart_version=chart_version, ) info('Waiting until the webserver can be reached ...') deployment_helper = kube_ctx.deployment_helper() is_web_deployment_available = deployment_helper.wait_until_deployment_available( namespace=deployment_name, name='concourse-web', timeout_seconds=timeout_seconds, ) if not is_web_deployment_available: fail( dedent("""No Concourse webserver reachable after {t} second(s). Check status of Pods created by "concourse-web"-deployment in namespace {ns} """).format( t=timeout_seconds, ns=deployment_name, )) info('Webserver became accessible.') # Even though the deployment is available, the ingress might need a few seconds to update. time.sleep(3) info('Setting teams on Concourse ...') set_teams(config=concourse_cfg)
def _ensure_submodule_exists(repo: git.Repo, path: str):
    '''Use GitPython to verify that a submodule with the given path exists in the repository.'''
    if any(submodule.path == path for submodule in repo.submodules):
        return
    fail('No submodule with path {p} exists in the repository.'.format(p=path))
def update_submodule(
    repo_path: str,
    tree_ish: str,
    submodule_path: str,
    commit_hash: str,
    author: str,
    email: str,
):
    '''Update the submodule of a git-repository to a specific commit.

    Create a new commit, with the passed tree-ish as parent, in the given repository.

    Note that this implementation only supports toplevel submodules. To be removed in a
    future version.

    Parameters
    ------
    repo_path : str
        Path to a directory containing an initialised git-repo with a submodule to update.
    tree_ish : str
        Valid tree-ish to use as base for creating the new commit. Used as parent for the
        commit to be created
        Example: 'master' for the head of the master-branch.
    submodule_path : str
        Path (relative to the repository root) to the submodule. Must be immediately below
        the root of the repository.
    commit_hash : str
        The hash the submodule should point to in the created commit. This should be a
        valid commit-hash in the submodule's repository.
    author : str,
        Will be set as author of the created commit
    email : str
        Will be set for the author of the created commit

    Returns
    ------
    str
        The hexadecimal SHA-1 hash of the created commit
    '''
    repo_path = existing_dir(os.path.abspath(repo_path))
    not_empty(submodule_path)
    # toplevel-only restriction: reject nested submodule paths
    if '/' in submodule_path:
        fail('This implementation only supports toplevel submodules: {s}'.format(s=submodule_path))
    not_empty(tree_ish)
    not_empty(commit_hash)
    not_empty(author)
    not_empty(email)

    repo = git.Repo(repo_path)
    _ensure_submodule_exists(repo, submodule_path)

    # Create mk-tree-parseable string-representation from given tree-ish.
    tree = repo.tree(tree_ish)
    tree_representation = _serialise_and_update_submodule(tree, submodule_path, commit_hash)

    # Pass the patched tree to git mk-tree using GitPython. We cannot do this in GitPython
    # directly as it does not support arbitrary tree manipulation.
    # We must keep a reference to auto_interrupt as it closes all streams to the subprocess
    # on finalisation
    auto_interrupt = repo.git.mktree(istream=subprocess.PIPE, as_process=True)
    process = auto_interrupt.proc
    stdout, _ = process.communicate(input=tree_representation.encode())

    # returned string is byte-encoded and newline-terminated
    new_sha = stdout.decode('utf-8').strip()

    # Create a new commit in the repo's object database from the newly created tree.
    actor = git.Actor(author, email)
    parent_commit = repo.commit(tree_ish)
    commit = git.Commit.create_from_tree(
        repo=repo,
        tree=new_sha,
        parent_commits=[parent_commit],
        message='Upgrade submodule {s} to commit {c}'.format(
            s=submodule_path,
            c=commit_hash,
        ),
        author=actor,
        committer=actor,
    )
    return commit.hexsha