# Example 1
def get_short_os_name() -> str:
    """
    Return a short OS identifier such as 'ubuntu18.04', 'centos7', or 'macos'.

    On Linux, the name and version are parsed from /etc/os-release. For Ubuntu
    the full version (e.g. 18.04) is kept; for other distributions only the
    major version component is used. On macOS no version suffix is appended.

    Raises:
        ValueError: if /etc/os-release is absent or does not contain both NAME
            and VERSION_ID, or if the platform is neither Linux nor macOS.
    """
    platform_system = platform.system()
    os_name = ''
    os_version = ''
    if platform_system == 'Linux':
        os_release_path = '/etc/os-release'
        if not os.path.exists(os_release_path):
            raise ValueError(
                f"Cannot identify OS release. File {os_release_path} is not present.")
        os_release_str = read_file(os_release_path)
        os_release_lines = [line.strip() for line in os_release_str.split('\n') if line.strip()]
        # Split on the first '=' only: os-release values may themselves
        # contain '=' characters, and such lines must not be dropped.
        os_release_tuples = [line.split('=', 1) for line in os_release_lines]
        kvs = {t[0]: t[1].strip('"') for t in os_release_tuples if len(t) == 2}
        name = kvs.get('NAME')
        version_id = kvs.get('VERSION_ID')
        if not (name and version_id):
            raise ValueError(
                f"Could not determine OS name and version from the contents of "
                f"{os_release_path}. Parsed data: {kvs}. Raw contents:\n{os_release_str}")
        # E.g. NAME="CentOS Linux" -> 'centos'.
        os_name = name.split()[0].lower()
        if os_name == 'ubuntu':
            # For Ubuntu we will keep the full version, such as 18.04.
            os_version = version_id
        else:
            os_version = version_id.split('.')[0]
    elif platform_system == 'Darwin':
        os_name = 'macos'
    else:
        raise ValueError(f"Unsupported platform: {platform_system}")

    return os_name + os_version
# Example 2
    def _read_path_file(self, path_file_name: str) -> Optional[str]:
        """
        Read a directory path stored in a marker file under the build root.

        Args:
            path_file_name: name of the file (relative to self.build_root)
                whose contents are a directory path.

        Returns:
            The directory path contained in the file, or None if the marker
            file itself does not exist.

        Raises:
            IOError: if the path read from the file is not an existing
                directory.
        """
        path_file_path = os.path.join(self.build_root, path_file_name)
        if not os.path.exists(path_file_path):
            return None

        dir_path = read_file(path_file_path).strip()
        # The error message promises a directory, so check isdir rather than
        # mere existence (a plain file at this path is also an error).
        if not os.path.isdir(dir_path):
            raise IOError(
                "Path contained in file %s does not exist or is not a directory: '%s'"
                % (path_file_path, dir_path))
        return dir_path
# Example 3
def get_github_token(token_file_path: Optional[str]) -> Optional[str]:
    """
    Obtain a GitHub token from the given file, or, when no file is specified,
    from the GITHUB_TOKEN environment variable.

    Returns None when no token source is available. Raises ValueError when a
    token is found but is not exactly 40 characters long.
    """
    token: Optional[str] = (
        read_file(token_file_path).strip() if token_file_path
        else os.getenv('GITHUB_TOKEN'))
    if token is None:
        return None

    # Classic GitHub personal access tokens are exactly 40 characters.
    if len(token) != 40:
        raise ValueError(f"Invalid GitHub token length: {len(token)}, expected 40.")
    return token
# Example 4
def get_yb_pgbackend_link_cmd(build_root: str) -> Tuple[str, List[str]]:
    """
    Locate and parse the linker command file for the yb_pgbackend library.

    Returns a tuple of the Postgres backend build directory and the tokenized
    linker command line read from the single matching link_cmd file.

    Raises:
        IOError: if the backend build directory does not exist.
        ValueError: if there is not exactly one matching link_cmd file.
    """
    backend_dir = os.path.join(build_root, 'postgres_build', 'src', 'backend')
    if not os.path.exists(backend_dir):
        raise IOError("Directory does not exist: %s" % backend_dir)

    prefix_str = 'link_cmd_libyb_pgbackend.so.'
    candidates = [os.fspath(match) for match in Path(backend_dir).glob(prefix_str + '*')]

    # Exactly one link command file is expected; anything else is ambiguous.
    if len(candidates) != 1:
        raise ValueError(
            "Looking for the build command for the yb_pgbackend library, failed to find exactly "
            "one file starting with %s in %s. Got %s" %
            (prefix_str, backend_dir, candidates))

    link_cmd_args = read_file(candidates[0]).strip().split()
    return backend_dir, link_cmd_args
# Example 5
    def update_archive_metadata_file(self) -> None:
        """
        Regenerate the third-party archive metadata file from GitHub releases.

        Fetches releases of the yugabyte/yugabyte-db-thirdparty repository,
        groups them by the commit they were built from, identifies the latest
        release group (optionally adding commits explicitly requested via
        self.also_use_commits), drops broken and URL-invalid releases, keeps
        the newest release per deduplication key, and writes the result to the
        archive metadata file.

        Raises:
            ValueError: if the latest release group cannot be identified
                unambiguously, or a user-specified commit has no releases.
        """
        yb_version = read_file(os.path.join(YB_SRC_ROOT, 'version.txt')).strip()

        archive_metadata_path = get_archive_metadata_file_path()
        logging.info(f"Updating third-party archive metadata file in {archive_metadata_path}")

        github_client = Github(get_github_token(self.github_token_file_path))
        repo = github_client.get_repo('yugabyte/yugabyte-db-thirdparty')

        releases_by_commit: Dict[str, ReleaseGroup] = {}
        num_skipped_old_tag_format = 0
        num_skipped_wrong_branch = 0
        num_releases_found = 0

        releases = []
        get_releases_start_time_sec = time.time()
        try:
            for release in repo.get_releases():
                releases.append(release)
        except GithubException as exc:
            # The GitHub API caps pagination at 1000 results; that is enough
            # for our purposes, so this particular failure is not fatal.
            if 'Only the first 1000 results are available.' in str(exc):
                logging.info("Ignoring exception: %s", exc)
            else:
                # Bare re-raise preserves the original traceback.
                raise
        logging.info("Time spent to iterate all releases: %.1f sec",
                     time.time() - get_releases_start_time_sec)

        for release in releases:
            sha: str = release.target_commitish
            assert isinstance(sha, str)

            # target_commitish may be a branch name; resolve it to a full SHA.
            if SHA_HASH.match(sha) is None:
                sha = repo.get_commit(sha).sha

            tag_name = release.tag_name
            if len(tag_name.split('-')) <= 2:
                logging.debug(f"Skipping release tag: {tag_name} (old format, too few components)")
                num_skipped_old_tag_format += 1
                continue
            if self.tag_filter_pattern and not self.tag_filter_pattern.match(tag_name):
                logging.info(f'Skipping tag {tag_name}, does not match the filter')
                continue

            try:
                yb_dep_release = GitHubThirdPartyRelease(release, target_commitish=sha)
            except SkipThirdPartyReleaseException as ex:
                logging.warning("Skipping release: %s", ex)
                continue

            if not yb_dep_release.is_consistent_with_yb_version(yb_version):
                logging.debug(
                    f"Skipping release tag: {tag_name} (does not match version {yb_version})")
                num_skipped_wrong_branch += 1
                continue

            if sha not in releases_by_commit:
                releases_by_commit[sha] = ReleaseGroup(sha)

            num_releases_found += 1
            logging.debug(f"Found release: {yb_dep_release}")
            releases_by_commit[sha].add_release(yb_dep_release)

        if num_skipped_old_tag_format > 0:
            logging.info(f"Skipped {num_skipped_old_tag_format} releases due to old tag format")
        if num_skipped_wrong_branch > 0:
            logging.info(f"Skipped {num_skipped_wrong_branch} releases due to branch mismatch")
        logging.info(
            f"Found {num_releases_found} releases for {len(releases_by_commit)} different commits")

        # The latest group must be unambiguous: the group holding the release
        # with the greatest creation timestamp must be the same group whose
        # minimum creation timestamp is the greatest.
        latest_group_by_max = max(
            releases_by_commit.values(), key=ReleaseGroup.get_max_creation_timestamp)
        latest_group_by_min = max(
            releases_by_commit.values(), key=ReleaseGroup.get_min_creation_timestamp)
        if latest_group_by_max is not latest_group_by_min:
            raise ValueError(
                "Overlapping releases for different commits. No good way to identify latest "
                f"release: e.g. {latest_group_by_max.sha} and {latest_group_by_min.sha}.")

        latest_group: ReleaseGroup = latest_group_by_max

        latest_release_sha = latest_group.sha
        logging.info(
            f"Latest released yugabyte-db-thirdparty commit: {latest_release_sha}. "
            f"Released at: {latest_group.get_max_creation_timestamp()}.")

        groups_to_use: List[ReleaseGroup] = [latest_group]

        if self.also_use_commits:
            for extra_commit in self.also_use_commits:
                logging.info(f"Additional manually specified commit to use: {extra_commit}")
                if extra_commit == latest_release_sha:
                    logging.info(
                        f"(already matches the latest commit {latest_release_sha}, skipping.)")
                    continue
                if extra_commit not in releases_by_commit:
                    raise ValueError(
                        f"No releases found for user-specified commit {extra_commit}. "
                        "Please check if there is an error.")
                groups_to_use.append(releases_by_commit[extra_commit])

        new_metadata: Dict[str, Any] = {
            SHA_FOR_LOCAL_CHECKOUT_KEY: latest_release_sha,
            'archives': []
        }
        releases_to_use: List[GitHubThirdPartyRelease] = [
            rel for release_group in groups_to_use
            for rel in release_group.releases
            if rel.tag not in BROKEN_TAGS
        ]

        releases_by_key_without_tag: DefaultDict[Tuple[str, ...], List[GitHubThirdPartyRelease]] = \
            defaultdict(list)

        num_valid_releases = 0
        num_invalid_releases = 0
        for yb_thirdparty_release in releases_to_use:
            if yb_thirdparty_release.validate_url():
                num_valid_releases += 1
                releases_by_key_without_tag[
                    yb_thirdparty_release.get_sort_key(include_tag=False)
                ].append(yb_thirdparty_release)
            else:
                num_invalid_releases += 1
        logging.info(
            f"Valid releases found: {num_valid_releases}, invalid releases: {num_invalid_releases}")

        # When several releases differ only by tag, keep the greatest tag
        # (the most recent one) and drop the rest.
        filtered_releases_to_use = []
        for key_without_tag, releases_for_key in releases_by_key_without_tag.items():
            if len(releases_for_key) > 1:
                picked_release = max(releases_for_key, key=lambda r: r.tag)
                logging.info(
                    "Multiple releases found for the same key (excluding the tag). "
                    "Using the latest one: %s\n"
                    "Key: %s.\nReleases:\n  %s" % (
                        picked_release,
                        key_without_tag,
                        '\n  '.join([str(r) for r in releases_for_key])))
                filtered_releases_to_use.append(picked_release)
            else:
                filtered_releases_to_use.append(releases_for_key[0])

        filtered_releases_to_use.sort(key=GitHubThirdPartyRelease.get_sort_key)

        for yb_thirdparty_release in filtered_releases_to_use:
            new_metadata['archives'].append(yb_thirdparty_release.as_dict())

        write_yaml_file(new_metadata, archive_metadata_path)
        logging.info(
            f"Wrote information for {len(filtered_releases_to_use)} pre-built "
            f"yugabyte-db-thirdparty archives to {archive_metadata_path}.")
# Example 6
    def update_archive_metadata_file(self) -> None:
        """
        Regenerate the third-party archive metadata file from GitHub releases.

        Iterates over releases of the yugabyte/yugabyte-db-thirdparty
        repository, groups them by target commit, selects the latest release
        group, drops broken and URL-invalid releases, keeps the newest release
        per deduplication key, and writes the result to the archive metadata
        file.

        Raises:
            ValueError: if the latest release group cannot be identified
                unambiguously.
        """
        yb_version = read_file(os.path.join(YB_SRC_ROOT,
                                            'version.txt')).strip()

        archive_metadata_path = get_archive_metadata_file_path()
        logging.info(
            f"Updating third-party archive metadata file in {archive_metadata_path}"
        )

        github_client = Github(get_github_token(self.github_token_file_path))
        repo = github_client.get_repo('yugabyte/yugabyte-db-thirdparty')

        releases_by_commit: Dict[str, ReleaseGroup] = {}
        num_skipped_old_tag_format = 0
        num_skipped_wrong_branch = 0
        num_releases_found = 0

        for release in repo.get_releases():
            sha: str = release.target_commitish
            assert isinstance(sha, str)
            tag_name = release.tag_name
            if len(tag_name.split('-')) <= 2:
                logging.debug(
                    f"Skipping release tag: {tag_name} (old format, too few components)"
                )
                num_skipped_old_tag_format += 1
                continue
            if self.tag_filter_pattern and not self.tag_filter_pattern.match(
                    tag_name):
                logging.info(
                    f'Skipping tag {tag_name}, does not match the filter')
                continue

            yb_dep_release = GitHubThirdPartyRelease(release)
            if not yb_dep_release.is_consistent_with_yb_version(yb_version):
                logging.debug(
                    f"Skipping release tag: {tag_name} (does not match version {yb_version})"
                )
                num_skipped_wrong_branch += 1
                continue

            if sha not in releases_by_commit:
                releases_by_commit[sha] = ReleaseGroup(sha)

            num_releases_found += 1
            logging.info(f"Found release: {yb_dep_release}")
            releases_by_commit[sha].add_release(yb_dep_release)

        if num_skipped_old_tag_format > 0:
            logging.info(
                f"Skipped {num_skipped_old_tag_format} releases due to old tag format"
            )
        if num_skipped_wrong_branch > 0:
            logging.info(
                f"Skipped {num_skipped_wrong_branch} releases due to branch mismatch"
            )
        logging.info(
            f"Found {num_releases_found} releases for {len(releases_by_commit)} different commits"
        )
        # The latest group must be unambiguous: the group holding the newest
        # release must also be the one whose oldest release is the newest.
        latest_group_by_max = max(releases_by_commit.values(),
                                  key=ReleaseGroup.get_max_creation_timestamp)
        latest_group_by_min = max(releases_by_commit.values(),
                                  key=ReleaseGroup.get_min_creation_timestamp)
        if latest_group_by_max is not latest_group_by_min:
            raise ValueError(
                "Overlapping releases for different commits. No good way to identify latest "
                f"release: e.g. {latest_group_by_max.sha} and {latest_group_by_min.sha}."
            )

        latest_group = latest_group_by_max

        sha = latest_group.sha
        logging.info(
            f"Latest released yugabyte-db-thirdparty commit: {sha}. "
            f"Released at: {latest_group.get_max_creation_timestamp()}.")

        new_metadata: Dict[str, Any] = {
            SHA_FOR_LOCAL_CHECKOUT_KEY: sha,
            'archives': []
        }
        releases_for_one_commit = [
            rel for rel in latest_group.releases if rel.tag not in BROKEN_TAGS
        ]

        releases_by_key_without_tag: DefaultDict[Tuple[str, ...], List[GitHubThirdPartyRelease]] = \
            defaultdict(list)

        num_valid_releases = 0
        num_invalid_releases = 0
        for yb_thirdparty_release in releases_for_one_commit:
            if yb_thirdparty_release.validate_url():
                num_valid_releases += 1
                releases_by_key_without_tag[yb_thirdparty_release.get_sort_key(
                    include_tag=False)].append(yb_thirdparty_release)
            else:
                num_invalid_releases += 1
        logging.info(
            f"Valid releases found: {num_valid_releases}, invalid releases: {num_invalid_releases}"
        )

        # When several releases differ only by tag, keep the greatest tag
        # (the most recent one) and drop the rest.
        filtered_releases_for_one_commit = []
        for key_without_tag, releases_for_key in releases_by_key_without_tag.items(
        ):
            if len(releases_for_key) > 1:
                picked_release = max(releases_for_key, key=lambda r: r.tag)
                logging.info(
                    "Multiple releases found for the same key (excluding the tag). "
                    "Using the latest one: %s\n"
                    "Key: %s.\nReleases:\n  %s" %
                    (picked_release, key_without_tag, '\n  '.join(
                        [str(r) for r in releases_for_key])))
                filtered_releases_for_one_commit.append(picked_release)
            else:
                filtered_releases_for_one_commit.append(releases_for_key[0])

        filtered_releases_for_one_commit.sort(
            key=GitHubThirdPartyRelease.get_sort_key)

        for yb_thirdparty_release in filtered_releases_for_one_commit:
            new_metadata['archives'].append(yb_thirdparty_release.as_dict())

        write_yaml_file(new_metadata, archive_metadata_path)
        logging.info(
            f"Wrote information for {len(filtered_releases_for_one_commit)} pre-built "
            f"yugabyte-db-thirdparty archives to {archive_metadata_path}.")