Example #1
def test_extract_version_fields(self):
    self.assertEqual(util.extract_version_fields('1.2.3'), [1, 2, 3])
    self.assertEqual(util.extract_version_fields('1.2'), [1, 2])
    self.assertEqual(util.extract_version_fields('v1.2.3'), [1, 2, 3])
    self.assertEqual(util.extract_version_fields('v1.2'), [1, 2])
    self.assertRaises(IOError, util.extract_version_fields, 'v1.2', 3)
    self.assertRaises(IOError, util.extract_version_fields, '1.2', 3)
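The test above pins down the expected behaviour of util.extract_version_fields. A minimal sketch consistent with those assertions (not doozer's actual implementation) could look like this:

def extract_version_fields(version, at_least=0):
    # Strip an optional leading 'v', split on '.', and convert each field to int.
    fields = [int(f) for f in version.lstrip('v').split('.')]
    if len(fields) < at_least:
        # The test expects IOError when fewer than `at_least` fields are present.
        raise IOError(f'Not enough fields in version string: {version}')
    return fields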
def release_calc_upgrade_tests(version):
    arch = 'x86_64'
    versions = util.get_release_calc_previous(version, arch)
    major, minor = util.extract_version_fields(version, at_least=2)[:2]
    curr_versions = [x for x in versions if f'{major}.{minor}' in x]
    prev_versions = [x for x in versions if f'{major}.{minor-1}' in x]

    def get_test_edges(version_list):
        test_edges = []
        sorted_list = util.sort_semver(version_list)
        test_edges += sorted_list[:5]  # add first 5
        test_edges += sorted_list[-5:]  # add last 5
        # 5 equally distributed between remaining
        remaining = sorted_list[5:-5]
        if len(remaining) < 5:
            test_edges += remaining
        else:
            step = len(remaining) // 5
            test_edges += remaining[step::step]
        return util.sort_semver(set(test_edges))

    edges = get_test_edges(curr_versions) + get_test_edges(prev_versions)
    print(','.join(edges))
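The sampling done by get_test_edges is easier to see with plain integers standing in for version strings and Python's built-in sorted() in place of util.sort_semver. pick_test_edges below is an illustrative name, not part of doozer's util:

# Plain-integer sketch of get_test_edges: first 5, last 5, and roughly 5 more
# spread evenly across the middle.
def pick_test_edges(sorted_list):
    edges = []
    edges += sorted_list[:5]    # first 5
    edges += sorted_list[-5:]   # last 5
    remaining = sorted_list[5:-5]
    if len(remaining) < 5:
        edges += remaining
    else:
        step = len(remaining) // 5
        edges += remaining[step::step]  # pick every `step`-th element
    return sorted(set(edges))

print(pick_test_edges(list(range(30))))
# [0, 1, 2, 3, 4, 9, 13, 17, 21, 25, 26, 27, 28, 29]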
Example #3
def images_streams_prs(runtime, github_access_token, bug, interstitial,
                       ignore_ci_master, draft_prs, moist_run, add_labels):
    runtime.initialize(clone_distgits=False, clone_source=False)
    g = Github(login_or_token=github_access_token)
    github_user = g.get_user()

    major = runtime.group_config.vars['MAJOR']
    minor = runtime.group_config.vars['MINOR']
    interstitial = int(interstitial)

    master_major, master_minor = extract_version_fields(what_is_in_master(),
                                                        at_least=2)
    if not ignore_ci_master and (major > master_major or minor > master_minor):
        # ART is building a release before it is in master. Too early to open PRs.
        runtime.logger.warning(
            f'Target {major}.{minor} has not been in master yet (it is tracking {master_major}.{master_minor}); skipping PRs'
        )
        exit(0)

    prs_in_master = (major == master_major
                     and minor == master_minor) and not ignore_ci_master

    pr_links = {}  # map of distgit_key to PR URLs associated with updates
    new_pr_links = {}
    skipping_dgks = set()  # If a distgit key is skipped, its children will see it in this list and skip themselves.
    for image_meta in runtime.ordered_image_metas():
        dgk = image_meta.distgit_key
        logger = image_meta.logger
        logger.info('Analyzing image')

        alignment_prs_config = image_meta.config.content.source.ci_alignment.streams_prs

        if alignment_prs_config and alignment_prs_config.enabled is not Missing and not alignment_prs_config.enabled:
            # Make sure this is an explicit False. Missing means the default or True.
            logger.info('The image has alignment PRs disabled; ignoring')
            continue

        from_config = image_meta.config['from']
        if not from_config:
            logger.info('Skipping PRs since there is no configured .from')
            continue

        desired_parents = []
        builders = from_config.builder or []
        for builder in builders:
            upstream_image = resolve_upstream_from(runtime, builder)
            if not upstream_image:
                logger.warning(
                    f'Unable to resolve upstream image for: {builder}')
                break
            desired_parents.append(upstream_image)

        parent_upstream_image = resolve_upstream_from(runtime, from_config)
        if len(desired_parents) != len(builders) or not parent_upstream_image:
            logger.warning(
                'Unable to find all ART equivalent upstream images for this image'
            )
            continue

        desired_parents.append(parent_upstream_image)
        desired_parent_digest = calc_parent_digest(desired_parents)
        logger.info(
            f'Found desired FROM state of: {desired_parents} with digest: {desired_parent_digest}'
        )

        source_repo_url, source_repo_branch = _get_upstream_source(
            runtime, image_meta)

        if not source_repo_url:
            # No upstream to clone; no PRs to open
            continue

        public_repo_url, public_branch = runtime.get_public_upstream(
            source_repo_url)
        if not public_branch:
            public_branch = source_repo_branch

        # There are two standard upstream branching styles:
        # release-4.x   : CI fast-forwards from master when appropriate
        # openshift-4.x : Upstream team manages completely.
        # For the former style, we may need to open the PRs against master.
        # For the latter style, always open directly against the named branch.
        if public_branch.startswith('release-') and prs_in_master:
            # TODO: auto-detect default branch for repo instead of assuming master
            public_branch = 'master'

        _, org, repo_name = split_git_url(public_repo_url)

        public_source_repo = g.get_repo(f'{org}/{repo_name}')

        try:
            fork_repo_name = f'{github_user.login}/{repo_name}'
            fork_repo = g.get_repo(fork_repo_name)
        except UnknownObjectException:
            # Repo doesn't exist; fork it
            fork_repo = github_user.create_fork(public_source_repo)

        fork_branch_name = f'art-consistency-{runtime.group_config.name}-{dgk}'
        fork_branch_head = f'{github_user.login}:{fork_branch_name}'

        fork_branch = None
        try:
            fork_branch = fork_repo.get_branch(fork_branch_name)
        except UnknownObjectException:
            # Doesn't presently exist and will need to be created
            pass
        except GithubException as ge:
            # This API seems to return 404 instead of UnknownObjectException.
            # So allow 404 to pass through as well.
            if ge.status != 404:
                raise

        public_repo_url = convert_remote_git_to_ssh(public_repo_url)
        clone_dir = os.path.join(runtime.working_dir, 'clones', dgk)
        # Clone the private url to make the best possible use of our doozer_cache
        runtime.git_clone(source_repo_url, clone_dir)

        with Dir(clone_dir):
            exectools.cmd_assert(f'git remote add public {public_repo_url}')
            exectools.cmd_assert(
                f'git remote add fork {convert_remote_git_to_ssh(fork_repo.git_url)}'
            )
            exectools.cmd_assert('git fetch --all')

            # The path to the Dockerfile in the target branch
            if image_meta.config.content.source.dockerfile is not Missing:
                # Be aware that this attribute sometimes contains path elements too.
                dockerfile_name = image_meta.config.content.source.dockerfile
            else:
                dockerfile_name = "Dockerfile"

            df_path = Dir.getpath()
            if image_meta.config.content.source.path:
                dockerfile_name = os.path.join(
                    image_meta.config.content.source.path, dockerfile_name)

            df_path = df_path.joinpath(dockerfile_name)

            fork_branch_parent_digest = None
            fork_branch_parents = None
            if fork_branch:
                # If there is already an art reconciliation branch, get an MD5
                # of the FROM images in the Dockerfile in that branch.
                exectools.cmd_assert(f'git checkout fork/{fork_branch_name}')
                fork_branch_parent_digest, fork_branch_parents = extract_parent_digest(
                    df_path)

            # Now change over to the target branch in the actual public repo
            exectools.cmd_assert(f'git checkout public/{public_branch}')

            source_branch_parent_digest, source_branch_parents = extract_parent_digest(
                df_path)

            if desired_parent_digest == source_branch_parent_digest:
                green_print(
                    'Desired digest and source digest match; Upstream is in a good state'
                )
                continue

            yellow_print(
                f'Upstream dockerfile does not match desired state in {public_repo_url}/blob/{public_branch}/{dockerfile_name}'
            )
            print(
                f'Desired parents: {desired_parents} ({desired_parent_digest})'
            )
            print(
                f'Source parents: {source_branch_parents} ({source_branch_parent_digest})'
            )
            print(
                f'Fork branch digest: {fork_branch_parents} ({fork_branch_parent_digest})'
            )

            first_commit_line = f"Updating {image_meta.name} builder & base images to be consistent with ART"
            reconcile_info = f"Reconciling with {convert_remote_git_to_https(runtime.gitdata.origin_url)}/tree/{runtime.gitdata.commit_hash}/images/{os.path.basename(image_meta.config_filename)}"

            diff_text = None
            if fork_branch_parent_digest != desired_parent_digest:
                # The fork branch either does not exist, or does not have the desired parent image state
                # Let's create a local branch that will contain the Dockerfile in the state we desire.
                work_branch_name = '__mod'
                exectools.cmd_assert(f'git checkout public/{public_branch}')
                exectools.cmd_assert(f'git checkout -b {work_branch_name}')
                with df_path.open(mode='r+') as handle:
                    dfp = DockerfileParser(cache_content=True,
                                           fileobj=io.BytesIO())
                    dfp.content = handle.read()
                    dfp.parent_images = desired_parents
                    handle.truncate(0)
                    handle.seek(0)
                    handle.write(dfp.content)

                diff_text, _ = exectools.cmd_assert(f'git diff {str(df_path)}')

                if not moist_run:
                    exectools.cmd_assert(f'git add {str(df_path)}')
                    commit_prefix = ''
                    if repo_name.startswith('kubernetes'):
                        # A couple of repos have this requirement: openshift/kubernetes & openshift/kubernetes-autoscaler.
                        # This check may suffice for now, but it may eventually need to be in doozer metadata.
                        commit_prefix = 'UPSTREAM: <carry>: '
                    commit_msg = f"""{commit_prefix}{first_commit_line}
{reconcile_info}
"""
                    exectools.cmd_assert(
                        f'git commit -m "{commit_msg}"'
                    )  # Add a commit atop the public branch's current state
                    # Create or update the remote fork branch
                    exectools.cmd_assert(
                        f'git push --force fork {work_branch_name}:{fork_branch_name}'
                    )

            # At this point, we have a fork branch in the proper state
            pr_body = f"""{first_commit_line}
{reconcile_info}

If you have any questions about this pull request, please reach out to `@art-team` in the `#aos-art` coreos slack channel.
"""

            parent_pr_url = None
            parent_meta = image_meta.resolve_parent()
            if parent_meta:
                if parent_meta.distgit_key in skipping_dgks:
                    skipping_dgks.add(image_meta.distgit_key)
                    yellow_print(
                        f'Image has parent {parent_meta.distgit_key} which was skipped; skipping self: {image_meta.distgit_key}'
                    )
                    continue

                parent_pr_url = pr_links.get(parent_meta.distgit_key, None)
                if parent_pr_url:
                    if parent_meta.config.content.source.ci_alignment.streams_prs.merge_first:
                        skipping_dgks.add(image_meta.distgit_key)
                        yellow_print(
                            f'Image has parent {parent_meta.distgit_key} open PR ({parent_pr_url}) and streams_prs.merge_first==True; skipping PR opening for this image {image_meta.distgit_key}'
                        )
                        continue

                    # If the parent has an open PR associated with it, make sure the
                    # child PR notes that the parent PR should merge first.
                    pr_body += f'\nDepends on {parent_pr_url} . Allow it to merge and then run `/test all` on this PR.'

            # Let's see if there is a PR opened
            open_prs = list(
                public_source_repo.get_pulls(state='open',
                                             head=fork_branch_head))
            if open_prs:
                existing_pr = open_prs[0]
                # Update the body, but never the title; the upstream team may need to set something
                # like a "Bug XXXX:" prefix there. Don't muck with it.

                if alignment_prs_config.auto_label and add_labels:
                    # If we are to automatically add labels to this upstream PR, do so.
                    existing_pr.set_labels(*alignment_prs_config.auto_label)

                existing_pr.edit(body=pr_body)
                pr_url = existing_pr.html_url
                pr_links[dgk] = pr_url
                yellow_print(
                    f'A PR is already open requesting desired reconciliation with ART: {pr_url}'
                )
                continue

            # Otherwise, we need to create a pull request
            if moist_run:
                pr_links[dgk] = f'MOIST-RUN-PR:{dgk}'
                green_print(
                    f'Would have opened PR against: {public_source_repo.html_url}/blob/{public_branch}/{dockerfile_name}.'
                )
                if parent_pr_url:
                    green_print(
                        f'Would have identified dependency on PR: {parent_pr_url}.'
                    )
                if diff_text:
                    yellow_print(diff_text)
                else:
                    yellow_print(
                        f'Fork from which PR would be created ({fork_branch_head}) is populated with desired state.'
                    )
            else:
                pr_title = first_commit_line
                if bug:
                    pr_title = f'Bug {bug}: {pr_title}'
                new_pr = public_source_repo.create_pull(title=pr_title,
                                                        body=pr_body,
                                                        base=public_branch,
                                                        head=fork_branch_head,
                                                        draft=draft_prs)
                if alignment_prs_config.auto_label and add_labels:
                    # If we are to automatically add labels to this upstream PR, do so.
                    new_pr.set_labels(*alignment_prs_config.auto_label)
                pr_msg = f'A new PR has been opened: {new_pr.html_url}'
                pr_links[dgk] = new_pr.html_url
                new_pr_links[dgk] = new_pr.html_url
                logger.info(pr_msg)
                yellow_print(pr_msg)
                print(
                    f'Sleeping {interstitial} seconds before opening another PR to prevent flooding prow...'
                )
                time.sleep(interstitial)

    if new_pr_links:
        print('Newly opened PRs:')
        print(yaml.safe_dump(new_pr_links))

    if pr_links:
        print('Currently open PRs:')
        print(yaml.safe_dump(pr_links))
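calc_parent_digest and extract_parent_digest are referenced above but their bodies are not shown. A plausible sketch, assuming the digest is simply an MD5 over the ordered list of FROM images as the comments suggest (the real doozer helpers may differ):

import hashlib
import io

from dockerfile_parse import DockerfileParser


def calc_parent_digest(parent_images):
    # Hash the ordered FROM images so two Dockerfiles can be compared cheaply.
    return hashlib.md5(';'.join(parent_images).encode('utf-8')).hexdigest()


def extract_parent_digest(df_path):
    # Parse the Dockerfile at df_path and return (digest, parent_images).
    with df_path.open(mode='r') as handle:
        dfp = DockerfileParser(cache_content=True, fileobj=io.BytesIO())
        dfp.content = handle.read()
    return calc_parent_digest(dfp.parent_images), dfp.parent_images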