Example #1
    def register_source_alias(self, alias, path):
        self.logger.info("Registering source alias %s: %s" % (alias, path))
        path = os.path.abspath(path)
        assertion.isdir(path, "Error registering source alias %s" % alias)
        self.source_paths[alias] = path
        with Dir(path):
            origin_url = "?"
            rc1, out_origin, err_origin = exectools.cmd_gather(
                ["git", "config", "--get", "remote.origin.url"])
            if rc1 == 0:
                origin_url = out_origin.strip()
                # Usually something like "git@github.com:openshift/origin.git"
                # But we want an https hyperlink like https://github.com/openshift/origin
                if origin_url.startswith("git@"):
                    origin_url = origin_url[4:]  # remove git@
                    origin_url = origin_url[:-4]  # remove .git
                    origin_url = origin_url.replace(":", "/", 1)  # replace first colon with /
                    origin_url = "https://%s" % origin_url
            else:
                self.logger.error("Failed acquiring origin url for source alias %s: %s" % (alias, err_origin))

            branch = "?"
            rc2, out_branch, err_branch = exectools.cmd_gather(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"])
            if rc2 == 0:
                branch = out_branch.strip()
            else:
                self.logger.error("Failed acquiring origin branch for source alias %s: %s" % (alias, err_branch))

            self.add_record("source_alias", alias=alias, origin_url=origin_url, branch=branch, path=path)
Example #2
    def _build_rpm(self, scratch, record, terminate_event):
        """
        The part of the rpm build flow which actually starts the build,
        separated for clarity.
        """
        with Dir(self.source_path):
            self.logger.info("Building rpm: %s" % self.rpm_name)

            cmd_list = ['tito', 'release', '--debug', '--yes', '--test']
            if scratch:
                cmd_list.append('--scratch')
            cmd_list.append('aos')

            # Start the build; the output gives us the brew task information we need to watch it below
            rc, out, err = exectools.cmd_gather(cmd_list)

            if rc != 0:
                # Probably no point in continuing.. can't contact brew?
                self.logger.info("Unable to create brew task: out={}  ; err={}".format(out, err))
                return False

            # Otherwise, we should have a brew task we can monitor listed in the stdout.
            out_lines = out.splitlines()

            # Look for a line like: "Created task: 13949050" . Extract the identifier.
            task_id = next((created_line.split(":")[1]).strip() for created_line in out_lines if
                           created_line.startswith("Created task:"))

            record["task_id"] = task_id

            # Look for a line like: "Task info: https://brewweb.engineering.redhat.com/brew/taskinfo?taskID=13948942"
            task_url = next((info_line.split(":", 1)[1]).strip() for info_line in out_lines if
                            info_line.startswith("Task info:"))

            self.logger.info("Build running: {} - {}".format(self.rpm_name, task_url))

            record["task_url"] = task_url

            # Now that we have the basics about the task, wait for it to complete
            error = watch_task(self.logger.info, task_id, terminate_event)

            # Gather brew-logs
            logs_dir = "%s/%s" % (self.runtime.brew_logs_dir, self.name)
            logs_rc, _, logs_err = exectools.cmd_gather(
                ["brew", "download-logs", "-d", logs_dir, task_id])

            if logs_rc != 0:
                self.logger.info("Error downloading build logs from brew for task %s: %s" % (task_id, logs_err))

            if error is not None:
                # An error occurred. We don't have a viable build.
                self.logger.info("Error building rpm: {}, {}".format(task_url, error))
                return False

            self.logger.info("Successfully built rpm: {} ; {}".format(self.rpm_name, task_url))
        return True
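
The two next() expressions above pull the task id and task URL out of the build output; if no matching line exists they raise StopIteration. A small sketch with made-up output lines shaped like the ones the code parses:

# Made-up output lines, for illustration only.
sample_out = (
    "Created task: 13949050\n"
    "Task info: https://brewweb.engineering.redhat.com/brew/taskinfo?taskID=13949050\n"
)
out_lines = sample_out.splitlines()

task_id = next(line.split(":")[1].strip()
               for line in out_lines if line.startswith("Created task:"))
# split(":", 1) keeps the URL intact even though it contains more colons
task_url = next(line.split(":", 1)[1].strip()
                for line in out_lines if line.startswith("Task info:"))

assert task_id == "13949050"
assert task_url.endswith("taskID=13949050")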
Example #3
    def auto_version(self, repo_type):
        """
        Find and return the version of the atomic-openshift package in the OCP
        RPM repository.

        This repository is the primary input for OCP images.  The group_config
        for a group specifies the location for both signed and unsigned
        rpms.  The caller must indicate which to use.
        """

        repo_url = self.repos['rhel-server-ose-rpms'].baseurl(
            repo_type, 'x86_64')
        self.logger.info(
            "Getting version from atomic-openshift package in {}".format(
                repo_url))

        # create a randomish repo name to avoid erroneous cache hits
        repoid = "oit" + datetime.datetime.now().strftime("%s")
        version_query = [
            "/usr/bin/repoquery", "--quiet", "--tempcache", "--repoid", repoid,
            "--repofrompath", repoid + "," + repo_url, "--queryformat",
            "%{VERSION}", "atomic-openshift"
        ]
        rc, auto_version, err = exectools.cmd_gather(version_query)
        if rc != 0:
            raise RuntimeError(
                "Unable to get OCP version from RPM repository: {}".format(
                    err))

        version = "v" + auto_version.strip()

        self.logger.info("Auto-detected OCP version: {}".format(version))
        return version
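
Two details worth noting: strftime("%s") yields seconds since the epoch (a platform-dependent extension, not standard C), which makes the repoid effectively unique per run and so defeats stale yum metadata caching; and the "%{VERSION}" queryformat prints a bare version that is then prefixed with "v". A small sketch, with an assumed repoquery output value standing in for cmd_gather's result:

import datetime

# A fresh repoid per invocation avoids erroneous yum metadata cache hits.
repoid = "oit" + datetime.datetime.now().strftime("%s")   # e.g. "oit1700000000"

# repoquery --queryformat "%{VERSION}" prints only the version; the value
# below is an assumed example, not real output.
rc, auto_version, err = (0, "3.11.82\n", "")
if rc != 0:
    raise RuntimeError(
        "Unable to get OCP version from RPM repository: {}".format(err))
version = "v" + auto_version.strip()
assert version == "v3.11.82"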
Example #4
    def get_latest_build_info(self):
        """
        Queries brew to determine the most recently built release of the component
        associated with this image. This method does not rely on the "release"
        label needing to be present in the Dockerfile.

        :return: A tuple: (component name, version, release); e.g. ("registry-console-docker", "v3.6.173.0.75", "1")
        """

        component_name = self.get_component_name()

        tag = "{}-candidate".format(self.branch())

        rc, stdout, stderr = exectools.cmd_gather(
            ["brew", "latest-build", tag, component_name])

        assertion.success(rc, "Unable to search brew builds: %s" % stderr)

        latest = stdout.strip().splitlines()[-1].split(' ')[0]

        if not latest.startswith(component_name):
            # If no builds found, `brew latest-build` output will appear as:
            # Build                                     Tag                   Built by
            # ----------------------------------------  --------------------  ----------------
            raise IOError("No builds detected for %s using tag: %s" %
                          (self.qualified_name, tag))

        # latest example: "registry-console-docker-v3.6.173.0.75-1"
        name, version, release = latest.rsplit(
            "-", 2)  # [ "registry-console-docker", "v3.6.173.0.75", "1"]

        return name, version, release
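
The rsplit("-", 2) splits from the right, so dashes inside the component name survive while the version and release are peeled off the end. Using the sample NVR from the comment above:

# Sample NVR taken from the comment in the example.
latest = "registry-console-docker-v3.6.173.0.75-1"
name, version, release = latest.rsplit("-", 2)
assert (name, version, release) == ("registry-console-docker", "v3.6.173.0.75", "1")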
Example #5
    def create_tag(self, scratch):
        if not self.tag:
            raise ValueError('Must run set_nvr() before calling!')

        with Dir(self.source_path):
            if not scratch:
                exectools.cmd_assert('git tag {}'.format(self.tag))
            rc, sha, err = exectools.cmd_gather('git rev-parse HEAD')
            self.commit_sha = sha.strip()
Example #6
def get_brew_buildinfo(build, as_string=False):
    """Get the buildinfo of a brew build from brew.

    :param build: A build object; its nvr attribute is used to query brew
    :param bool as_string: If True, return the raw buildinfo output instead
    of a parsed dict

    :return dict buildinfo: A dict of the build info. Certain fields
    may be transformed, such as 'Tags' turned into a list, and 'Extra'
    into a proper dict object instead of a JSON string

    :return str buildinfo: The raw unparsed buildinfo as a string
    (only when as_string is True)

    Note: This is different from get_brew_build in that this function
    queries brew directly using the 'brew buildinfo' command, whereas
    get_brew_build queries the Errata Tool API for other information.

    This function will give information not provided by ET: build tags,
    finished date, built by, etc.
    """
    query_string = "brew buildinfo {nvr}".format(nvr=build.nvr)
    rc, stdout, stderr = exectools.cmd_gather(shlex.split(query_string))

    if as_string and rc == 0:
        return stdout
    else:
        buildinfo = {}

        # These keys indicate content which can be whitespace split into
        # an array, ex: the value of the "Tags" key comes as a string, but
        # we can split it on space characters and have a list of tags
        # instead.
        split_keys = ['Tags']
        # The value of these keys contain json strings which we can
        # attempt to load into a datastructure
        json_keys = ['Extra']
        for line in stdout.splitlines():
            key, token, rest = line.partition(': ')
            if key in split_keys:
                buildinfo[key] = rest.split(' ')
                continue
            elif key in json_keys:
                try:
                    # Why would we use the ast module for this? Is
                    # json.loads not enough? Actually, no, it isn't
                    # enough. The <airquotes>JSON</airquotes> returned
                    # from 'brew buildinfo' uses single-quotes to wrap
                    # strings, that is not valid json. Whereas,
                    # ast.literal_eval() can handle that and load the
                    # string into a structure we can work with
                    buildinfo[key] = ast.literal_eval(rest)
                except Exception as e:
                    buildinfo[key] = rest
                    continue
            else:
                buildinfo[key] = rest
        return buildinfo
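
The parsing loop above relies on str.partition() to split each "Key: value" line and on ast.literal_eval() to cope with brew's single-quoted pseudo-JSON. A self-contained sketch with assumed sample lines in that shape:

import ast

# Assumed sample lines in the "Key: value" shape the parser expects.
sample_lines = [
    "Tags: rhaos-3.11-rhel-7-candidate rhaos-3.11-rhel-7",
    "Extra: {'source': {'original_url': 'git://example.com/repo#abc123'}}",
    "Built by: ocp-build",
]

buildinfo = {}
for line in sample_lines:
    key, _sep, rest = line.partition(': ')
    if key == 'Tags':
        buildinfo[key] = rest.split(' ')
    elif key == 'Extra':
        # json.loads() would reject the single-quoted dict; ast.literal_eval() accepts it
        buildinfo[key] = ast.literal_eval(rest)
    else:
        buildinfo[key] = rest

assert buildinfo['Tags'][0] == 'rhaos-3.11-rhel-7-candidate'
assert buildinfo['Extra']['source']['original_url'].endswith('#abc123')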
Example #7
def get_tagged_image_builds(tag, latest=True):
    """Wrapper around shelling out to run 'brew list-tagged' for a given tag.

    :param str tag: The tag to list builds from
    :param bool latest: Only show the single latest build of a package
    """
    if latest:
        latest_option = '--latest'
    else:
        latest_option = ''

    query_string = "brew list-tagged {tag} {latest} --type=image --quiet".format(
        tag=tag, latest=latest_option)
    # --latest - Only the last build for that package
    # --type=image - Only show container image builds
    # --quiet - Omit field headers in output

    return exectools.cmd_gather(shlex.split(query_string))
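
When latest is False, the formatted string ends up with a doubled space where {latest} was; shlex.split() swallows that, so no empty argument reaches brew. A quick check with an assumed tag name:

import shlex

# Assumed tag name, for illustration only.
query_string = "brew list-tagged {tag} {latest} --type=image --quiet".format(
    tag="rhaos-3.11-rhel-7-candidate", latest='')
assert shlex.split(query_string) == [
    "brew", "list-tagged", "rhaos-3.11-rhel-7-candidate",
    "--type=image", "--quiet"]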
Example #8
    def test_gather_fail(self):
        """
        """

        (status, stdout, stderr) = exectools.cmd_gather(
            ["/usr/bin/sed", "-e", "f"])

        status_expected = 1
        stdout_expected = ""
        stderr_expected = "/usr/bin/sed: -e expression #1, char 1: unknown command: `f'\n"

        self.assertEquals(status_expected, status)
        self.assertEquals(stdout, stdout_expected)
        self.assertEquals(stderr, stderr_expected)

        # check that the log file has all of the tests.
        with open(self.test_file, 'r') as log_file:
            lines = log_file.readlines()

        self.assertEquals(len(lines), 6)
Example #9
    def test_gather_success(self):
        """
        """

        (status, stdout, stderr) = exectools.cmd_gather(
            "/usr/bin/echo hello there")
        status_expected = 0
        stdout_expected = "hello there\n"
        stderr_expected = ""

        self.assertEquals(status_expected, status)
        self.assertEquals(stdout, stdout_expected)
        self.assertEquals(stderr, stderr_expected)

        # check that the log file has all of the tests.

        with open(self.test_file, 'r') as log_file:
            lines = log_file.readlines()

        self.assertEquals(len(lines), 6)
Example #10
def get_tagged_rpm_builds(tag, arch='src', latest=True):
    """Wrapper around shelling out to run 'brew list-tagged' for a given tag.

    :param str tag: The tag to list builds from
    :param str arch: Filter results to only this architecture
    :param bool latest: Only show the single latest build of a package
    """
    if latest is True:
        latest_flag = "--latest"
    else:
        latest_flag = ""

    query_string = "brew list-tagged {tag} {latest} --rpm --quiet --arch {arch}".format(
        tag=tag, latest=latest_flag, arch=arch)
    # --latest - Only the last build for that package
    # --rpm - Only show RPM builds
    # --quiet - Omit field headers in output
    # --arch {arch} - Only show builds of this architecture

    return exectools.cmd_gather(shlex.split(query_string))
Example #11
def get_tagged_rpm_names(branch, arch='x86_64'):
    """Return the set of rpm names tagged into {branch}-container-build.

    :param str branch: Current building branch, such as rhaos-3.10-rhel-7
    :param str arch: CPU architecture to gather, in addition to noarch
    """
    query_string = "brew list-tagged --inherit --latest --rpms --arch {arch} {branch}-container-build"
    # --inherit - Follow tag inheritance when listing builds
    # --latest - Only the last build for that package
    # --rpms - Show rpms instead of builds
    # --arch {arch} - Only show rpms of this architecture

    rpms = []

    arches = ['noarch', arch]  # always gather noarch
    for a in arches:
        rc, stdout, stderr = exectools.cmd_gather(
            shlex.split(query_string.format(branch=branch, arch=a)))
        if rc == 0:
            rpms.extend(stdout.splitlines())
        else:
            raise ValueError(stderr)

    result = set([splitRPMFilename(line.strip())[0] for line in rpms])

    return result
Example #12
def check_rpm_buildroot(name, branch, arch='x86_64'):
    """
    Query the buildroot used by ODCS to determine if a given RPM name
    is provided by ODCS for the given arch.
    :param str name: RPM name
    :param str branch: Current building branch, such as rhaos-3.10-rhel-7
    :param str arch: CPU architecture to search
    """
    args = locals()
    query = 'repoquery --repofrompath foo,"http://download-node-02.eng.bos.redhat.com/brewroot/repos/{branch}-ppc64le-container-build/latest/{arch}" --repoid=foo --arch {arch},noarch --whatprovides {name}'
    rc, stdout, stderr = exectools.cmd_gather(query.format(**args))
    if rc == 0:
        result = []
        stdout = stdout.strip()
        for rpm in stdout.splitlines():
            n = rpm.split(':')[0]
            n = '-'.join(n.split('-')[0:-1])
            result.append(n)

        return result
    else:
        raise ValueError(stderr)
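
The name extraction above keeps everything before the first colon and then drops the trailing dash-separated field. A sketch with assumed output lines carrying an explicit epoch ("name-epoch:version-release.arch"); the real format depends on repoquery's default queryformat:

# Assumed repoquery --whatprovides output lines, for illustration only.
sample_lines = [
    "cri-o-0:1.11.11-1.rhaos3.11.git474f73d.el7.x86_64",
    "skopeo-0:0.1.32-4.git1715c90.el7.x86_64",
]
result = []
for rpm in sample_lines:
    n = rpm.split(':')[0]             # "cri-o-0"  (name plus epoch)
    n = '-'.join(n.split('-')[0:-1])  # drop the trailing epoch -> "cri-o"
    result.append(n)
assert result == ["cri-o", "skopeo"]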
Example #13
    def resolve_metadata(self):
        """
        The group control data can be on a local filesystem, in a git
        repository that can be checked out, or some day in a database

        If the scheme is empty, assume file:///...
        Allow http, https, ssh and ssh+git (all valid git clone URLs)
        """

        if self.metadata_dir is None:
            self.metadata_dir = constants.OCP_BUILD_DATA_RW

        schemes = ['ssh', 'ssh+git', "http", "https"]

        self.logger.info('Using {} for metadata'.format(self.metadata_dir))

        md_url = urlparse.urlparse(self.metadata_dir)
        if md_url.scheme in schemes or (md_url.scheme == ''
                                        and ':' in md_url.path):
            # Assume this is a git repo to clone
            #
            # An empty scheme with a colon in the path is likely an "scp" style
            # path, a la git@github.com:owner/path
            # determine where to put it
            md_name = os.path.splitext(os.path.basename(md_url.path))[0]
            md_destination = os.path.join(self.working_dir, md_name)
            clone_data = True
            if os.path.isdir(md_destination):
                self.logger.info(
                    'Metadata clone directory already exists, checking commit sha'
                )
                with Dir(md_destination):
                    rc, out, err = exectools.cmd_gather(
                        ["git", "ls-remote", self.metadata_dir, "HEAD"])
                    if rc:
                        raise IOError(
                            'Unable to check remote sha: {}'.format(err))
                    remote = out.strip().split('\t')[0]

                    try:
                        exectools.cmd_assert(
                            'git branch --contains {}'.format(remote))
                        self.logger.info(
                            '{} is already cloned and latest'.format(
                                self.metadata_dir))
                        clone_data = False
                    except:
                        rc, out, err = exectools.cmd_gather(
                            'git log origin/HEAD..HEAD')
                        out = out.strip()
                        if len(out):
                            msg = """
                            Local config is out of sync with remote and you have unpushed commits. {}
                            You must either clear your local config repo with `./oit.py cleanup`
                            or manually rebase from latest remote to continue
                            """.format(md_destination)
                            raise IOError(msg)

            if clone_data:
                if os.path.isdir(md_destination):  # delete if already there
                    shutil.rmtree(md_destination)
                self.logger.info('Cloning config data from {}'.format(
                    self.metadata_dir))
                if not os.path.isdir(md_destination):
                    cmd = "git clone --depth 1 {} {}".format(
                        self.metadata_dir, md_destination)
                    try:
                        exectools.cmd_assert(cmd.split(' '))
                    except:
                        if self.metadata_dir == constants.OCP_BUILD_DATA_RW:

                            self.logger.warn(
                                'Failed to clone {}, falling back to {}'.
                                format(constants.OCP_BUILD_DATA_RW,
                                       constants.OCP_BUILD_DATA_RO))
                            self.metadata_dir = constants.OCP_BUILD_DATA_RO
                            return self.resolve_metadata()
                        else:
                            raise
            self.metadata_dir = md_destination

        elif md_url.scheme in ['', 'file']:
            # no scheme, assume the path is a local file
            self.metadata_dir = md_url.path
            if not os.path.isdir(self.metadata_dir):
                raise ValueError(
                    "Invalid metadata_dir: {} - Not a directory".format(
                        self.metadata_dir))

        else:
            # invalid scheme: not '' or any of the valid list
            raise ValueError(
                "Invalid metadata_dir: {} - invalid scheme: {}".format(
                    self.metadata_dir, md_url.scheme))
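
The scheme test above treats a scheme-less path containing a colon as an scp-style git remote. A small sketch of just that check (an illustrative helper, written against the same Python 2 urlparse module the code uses):

import urlparse  # Python 2; on Python 3 use "from urllib.parse import urlparse"

def looks_like_git_remote(metadata_dir, schemes=('ssh', 'ssh+git', 'http', 'https')):
    """Illustrative only: mirror the scheme check in resolve_metadata()."""
    md_url = urlparse.urlparse(metadata_dir)
    return md_url.scheme in schemes or (md_url.scheme == '' and ':' in md_url.path)

# scp-style remotes have no scheme but carry a colon in the path portion:
assert looks_like_git_remote("git@github.com:openshift/ocp-build-data")
assert looks_like_git_remote("https://github.com/openshift/ocp-build-data")
assert not looks_like_git_remote("/home/user/ocp-build-data")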
Example #14
    def resolve_source(self, alias, required=True):
        """
        Looks up a source alias and returns a path to the directory containing
        that source. Sources can be specified on the command line, or, failing
        that, in group.yml.
        If a source specified in group.yml has not been resolved before,
        this method will clone that source and check out the group's desired
        branch before returning a path to the cloned repo.
        :param alias: The source alias to resolve
        :param required: If True, throw an exception if the alias is not found
        :return: Returns the source path or None (if required=False)
        """

        self.logger.debug(
            "Resolving local source directory for alias {}".format(alias))
        if alias in self.source_paths:
            self.logger.debug(
                "returning previously resolved path for alias {}: {}".format(
                    alias, self.source_paths[alias]))
            return self.source_paths[alias]

        # Check if the group config specifies the "alias" for the source location
        if (self.group_config.sources is Missing
                or alias not in self.group_config.sources):
            if required:
                raise IOError(
                    "Source alias not found in specified sources or in the current group: %s"
                    % alias)
            else:
                return None

        # Where the source will land
        source_dir = os.path.join(self.sources_dir, alias)
        self.logger.debug(
            "checking for source directory in source_dir: {}".format(
                source_dir))

        # If this source has already been extracted for this working directory
        if os.path.isdir(source_dir):
            # Store so that the next attempt to resolve the source hits the map
            self.source_paths[alias] = source_dir
            self.logger.info(
                "Source '{}' already exists in (skipping clone): {}".format(
                    alias, source_dir))
            return source_dir

        source_config = self.group_config.sources[alias]
        url = source_config["url"]
        branches = source_config['branch']
        self.logger.info(
            "Cloning source '%s' from %s as specified by group into: %s" %
            (alias, url, source_dir))
        exectools.cmd_assert(
            cmd=["git", "clone", url, source_dir],
            retries=3,
            on_retry=["rm", "-rf", source_dir],
        )
        stage_branch = branches.get('stage', None)
        fallback_branch = branches.get("fallback", None)
        found = False
        with Dir(source_dir):
            if self.stage and stage_branch:
                self.logger.info(
                    'Normal branch overridden by --stage option, using "{}"'.
                    format(stage_branch))
                branch = stage_branch
            else:
                branch = branches["target"]
            self.logger.info(
                "Attempting to checkout source '%s' branch %s in: %s" %
                (alias, branch, source_dir))

            if branch != "master":
                rc, out, err = exectools.cmd_gather(
                    ["git", "checkout", "-b", branch,
                     "origin/%s" % branch])
            else:
                rc = 0

            if rc == 0:
                found = True
            else:
                if self.stage and stage_branch:
                    raise IOError(
                        '--stage option specified and no stage branch named "{}" exists for {}|{}'
                        .format(stage_branch, alias, url))
                elif fallback_branch is not None:
                    self.logger.info(
                        "Unable to checkout branch %s ; trying fallback %s" %
                        (branch, fallback_branch))
                    self.logger.info(
                        "Attempting to checkout source '%s' fallback-branch %s in: %s"
                        % (alias, fallback_branch, source_dir))
                    if fallback_branch != "master":
                        rc2, out, err = exectools.cmd_gather([
                            "git", "checkout", "-b", fallback_branch,
                            "origin/%s" % fallback_branch
                        ], )
                    else:
                        rc2 = 0

                    if rc2 == 0:
                        found = True
                    else:
                        self.logger.error(
                            "Failed checking out fallback-branch %s: %s" %
                            (fallback_branch, err))
                else:
                    self.logger.error("Failed checking out branch %s: %s" %
                                      (branch, err))

            if found:
                # Store so that the next attempt to resolve the source hits the map
                self.register_source_alias(alias, source_dir)
                return source_dir
            else:
                if required:
                    raise IOError(
                        "Error checking out target branch of source '%s' in: %s"
                        % (alias, source_dir))
                else:
                    return None
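
The branch selection above follows a fixed order: the stage branch wins when --stage is in effect, otherwise the target branch is checked out, and the fallback branch is only tried if that checkout fails. A compact sketch with a hypothetical branches entry as it might appear for a source in group.yml:

# Hypothetical branches entry, for illustration only.
branches = {"target": "release-3.11", "fallback": "master", "stage": "stage-3.11"}
stage_mode = False   # stands in for self.stage (the --stage option)

stage_branch = branches.get("stage", None)
fallback_branch = branches.get("fallback", None)

if stage_mode and stage_branch:
    branch = stage_branch         # --stage overrides the normal target branch
else:
    branch = branches["target"]   # fallback is only attempted if this checkout fails

assert branch == "release-3.11"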
Example #15
    def _cmd(self, cmd):
        """Run a docker related command"""
        return exectools.cmd_gather(shlex.split(cmd))