Example #1
    def register_source_alias(self, alias, path):
        self.logger.info("Registering source alias %s: %s" % (alias, path))
        path = os.path.abspath(path)
        assertion.isdir(path, "Error registering source alias %s" % alias)
        self.source_paths[alias] = path
        with Dir(path):
            origin_url = "?"
            rc1, out_origin, err_origin = exectools.cmd_gather(
                ["git", "config", "--get", "remote.origin.url"])
            if rc1 == 0:
                origin_url = out_origin.strip()
                # Usually something like "git@github.com:openshift/origin.git",
                # but we want an https hyperlink like https://github.com/openshift/origin
                if origin_url.startswith("git@"):
                    origin_url = origin_url[4:]  # remove git@
                    origin_url = origin_url[:-4]  # remove .git
                    origin_url = origin_url.replace(":", "/", 1)  # replace first colon with /
                    origin_url = "https://%s" % origin_url
            else:
                self.logger.error("Failed acquiring origin url for source alias %s: %s" % (alias, err_origin))

            branch = "?"
            rc2, out_branch, err_branch = exectools.cmd_gather(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"])
            if rc2 == 0:
                branch = out_branch.strip()
            else:
                self.logger.error("Failed acquiring origin branch for source alias %s: %s" % (alias, err_branch))

            self.add_record("source_alias", alias=alias, origin_url=origin_url, branch=branch, path=path)
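
This example relies on two helpers that are not shown here: `exectools.cmd_gather`, which is unpacked as a `(returncode, stdout, stderr)` tuple, and `Dir`, a context manager that temporarily switches the working directory. A minimal sketch of what such helpers could look like, inferred only from how they are used above (the project's real implementations may differ):

import contextlib
import os
import subprocess


@contextlib.contextmanager
def Dir(path):
    # Temporarily change into `path`, restoring the previous directory on exit.
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(previous)


def cmd_gather(cmd):
    # Run a command and return (returncode, stdout, stderr), matching how the
    # example unpacks the result of exectools.cmd_gather.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return proc.returncode, out.decode("utf-8"), err.decode("utf-8")

For instance, `rc, out, err = cmd_gather(["git", "rev-parse", "--abbrev-ref", "HEAD"])` run inside a git checkout yields `rc == 0` with the current branch name in `out`, which is exactly how the example derives `branch`.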
Example #2
    def test_isdir(self):
        """
        Verify both positive and negative results for directory test
        """
        dir_exists = "/usr"
        dir_missing = "/tmp/doesnotexist"
        not_dir = "/etc/motd"

        try:
            assertion.isdir(dir_exists, "dir missing: {}".format(dir_exists))
        except assertion.FileNotFoundError as fnf_error:
            self.fail(
                "asserted real directory does not exist: {}".format(fnf_error))

        with self.assertRaises(assertion.FileNotFoundError):
            assertion.isdir(dir_missing, "dir missing: {}".format(dir_missing))

        # A path that exists but is not a directory should also raise FileNotFoundError
        with self.assertRaises(assertion.FileNotFoundError):
            assertion.isdir(not_dir, "file, not dir: {}".format(not_dir))
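
The test exercises an `assertion.isdir` helper that is expected to raise `assertion.FileNotFoundError` both when the path does not exist and when it exists but is not a directory. A rough sketch of a helper with that behavior, assuming the module exposes its own exception name (the project's actual implementation may differ):

import os


class FileNotFoundError(Exception):
    # Stand-in for assertion.FileNotFoundError; assumed to exist in the real module.
    pass


def isdir(path, message):
    # Raise when `path` is missing or is not a directory, mirroring what the test asserts.
    if not os.path.isdir(path):
        raise FileNotFoundError("{}: {}".format(message, path))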
Example #3
    def initialize(self,
                   mode='images',
                   clone_distgits=True,
                   validate_content_sets=False,
                   no_group=False,
                   clone_source=True,
                   disabled=None):

        if self.initialized:
            return

        if self.quiet and self.verbose:
            click.echo("Flags --quiet and --verbose are mutually exclusive")
            exit(1)

        # We could mark these as required and let the click library enforce this for us,
        # but that seems to prevent getting help from the various commands unless the
        # required parameters are specified. This can probably be solved more cleanly; TODO.
        if not no_group and self.group is None:
            click.echo("Group must be specified")
            exit(1)

        if self.working_dir is None:
            self.working_dir = tempfile.mkdtemp(".tmp", "oit-")
            # This can be set to False by operations which want the working directory to be left around
            self.remove_tmp_working_dir = True
            atexit.register(remove_tmp_working_dir, self)
        else:
            self.working_dir = os.path.abspath(self.working_dir)
            if not os.path.isdir(self.working_dir):
                os.makedirs(self.working_dir)

        self.distgits_dir = os.path.join(self.working_dir, "distgits")
        if not os.path.isdir(self.distgits_dir):
            os.mkdir(self.distgits_dir)

        self.distgits_diff_dir = os.path.join(self.working_dir,
                                              "distgits-diffs")
        if not os.path.isdir(self.distgits_diff_dir):
            os.mkdir(self.distgits_diff_dir)

        self.sources_dir = os.path.join(self.working_dir, "sources")
        if not os.path.isdir(self.sources_dir):
            os.mkdir(self.sources_dir)

        if disabled is not None:
            self.disabled = disabled

        self.initialize_logging()

        self.resolve_metadata()

        if no_group:
            return  # nothing past here should be run without a group

        self.record_log_path = os.path.join(self.working_dir, "record.log")
        self.record_log = open(self.record_log_path, 'a')
        atexit.register(close_file, self.record_log)

        # Directory where brew-logs will be downloaded after a build
        self.brew_logs_dir = os.path.join(self.working_dir, "brew-logs")
        if not os.path.isdir(self.brew_logs_dir):
            os.mkdir(self.brew_logs_dir)

        # Directory for flags between invocations in the same working-dir
        self.flags_dir = os.path.join(self.working_dir, "flags")
        if not os.path.isdir(self.flags_dir):
            os.mkdir(self.flags_dir)

        # First assume the user has given the full path to the group directory;
        # otherwise fall back to the 'groups' subdirectory of the metadata directory.
        group_dir = os.path.join(self.metadata_dir, self.group)
        if not os.path.isdir(group_dir):
            group_dir = os.path.join(self.metadata_dir, 'groups', self.group)

        assertion.isdir(
            group_dir, "Cannot find group directory {} in {}".format(
                self.group, self.metadata_dir))

        self.group_dir = group_dir

        self.images_dir = images_dir = os.path.join(self.group_dir, 'images')
        self.rpms_dir = rpms_dir = os.path.join(self.group_dir, 'rpms')

        # register the sources
        # For each "--source alias path" on the command line, register its existence with
        # the runtime.
        for r in self.source:
            self.register_source_alias(r[0], r[1])

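        # --sources points at a YAML file mapping alias -> local path; register each entry.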
        if self.sources:
            with open(self.sources, 'r') as sf:
                source_dict = yaml.safe_load(sf)
                if not isinstance(source_dict, dict):
                    raise ValueError(
                        '--sources param must be a yaml file containing a single dict.'
                    )
                for key, val in source_dict.items():
                    self.register_source_alias(key, val)

        with Dir(self.group_dir):
            self.group_config = self.get_group_config(self.group_dir)
            self.arches = self.group_config.get('arches', ['x86_64'])
            self.repos = Repos(self.group_config.repos, self.arches)

            if validate_content_sets:
                self.repos.validate_content_sets()

            if self.group_config.name != self.group:
                raise IOError(
                    "Name in group.yml does not match group name. Someone may have copied this group without updating group.yml (make sure to check branch)"
                )

            if self.group_config.includes is not Missing and self.include is None:
                self.include = self.group_config.includes

            if self.branch is None:
                if self.group_config.branch is not Missing:
                    self.branch = self.group_config.branch
                    self.logger.info("Using branch from group.yml: %s" %
                                     self.branch)
                else:
                    self.logger.info(
                        "No branch specified either in group.yml or on the command line; all included images will need to specify their own."
                    )
            else:
                self.logger.info("Using branch from command line: %s" %
                                 self.branch)

            if len(self.include) > 0:
                self.include = flatten_comma_delimited_entries(self.include)
                self.logger.info("Include list set to: %s" % str(self.include))

            # Initially populated with all .yml files found in the images directory.
            images_filename_list = []
            if os.path.isdir(images_dir):
                with Dir(images_dir):
                    images_filename_list = [
                        x for x in os.listdir(".") if os.path.isfile(x)
                    ]
            else:
                self.logger.debug(
                    '{} does not exist. Skipping image processing for group.'.
                    format(images_dir))

            rpms_filename_list = []
            if os.path.isdir(rpms_dir):
                with Dir(rpms_dir):
                    rpms_filename_list = [
                        x for x in os.listdir(".") if os.path.isfile(x)
                    ]
            else:
                self.logger.debug(
                    '{} does not exist. Skipping RPM processing for group.'.
                    format(rpms_dir))

            # Flattens a list like ['x', 'y,z'] into ['x.yml', 'y.yml', 'z.yml'].
            # The entries arrive as tuples; for the later include checks we need to be
            # able to remove items, so clone into a list.
            def flatten_into_filenames(names):
                if not names:
                    return []
                # split csv values
                result = []
                for n in names:
                    result.append([
                        "{}.yml".format(x)
                        for x in n.replace(' ', ',').split(',') if x != ''
                    ])
                # flatten result and remove dupes
                return list(set([y for x in result for y in x]))

            # process excludes before images and rpms
            # to ensure they never get added, -x is global
            exclude_filenames = flatten_into_filenames(self.exclude)
            if exclude_filenames:
                for x in exclude_filenames:
                    if x in images_filename_list:
                        images_filename_list.remove(x)
                    if x in rpms_filename_list:
                        rpms_filename_list.remove(x)

            image_include = []
            image_filenames = flatten_into_filenames(self.images)
            if image_filenames:
                also_exclude = set(image_filenames).intersection(
                    set(exclude_filenames))
                if len(also_exclude):
                    self.logger.warning(
                        "The following images were included and excluded but exclusion takes precedence: {}"
                        .format(', '.join(also_exclude)))
                for image in images_filename_list:
                    if image in image_filenames:
                        image_include.append(image)

            rpm_include = []
            rpms_filenames = flatten_into_filenames(self.rpms)
            if rpms_filenames:
                also_exclude = set(rpms_filenames).intersection(
                    set(exclude_filenames))
                if len(also_exclude):
                    self.logger.warning(
                        "The following rpms were included and excluded but exclusion takes precedence: {}"
                        .format(', '.join(also_exclude)))
                for rpm in rpms_filename_list:
                    if rpm in rpms_filenames:
                        rpm_include.append(rpm)

            missed_include = set(image_filenames +
                                 rpms_filenames) - set(image_include +
                                                       rpm_include)
            if len(missed_include) > 0:
                raise IOError(
                    'Unable to find the following images or rpms configs: {}'.
                    format(', '.join(missed_include)))

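            # Factory helpers: build a metadata object for a config file and register it
            # in the image/rpm map when it is enabled or `force` is set.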
            def gen_ImageMetadata(base_dir, config_filename, force):
                metadata = ImageMetadata(self, base_dir, config_filename)
                if force or metadata.enabled:
                    self.image_map[metadata.distgit_key] = metadata

            def gen_RPMMetadata(base_dir, config_filename, force):
                metadata = RPMMetadata(self,
                                       base_dir,
                                       config_filename,
                                       clone_source=clone_source)
                if force or metadata.enabled:
                    self.rpm_map[metadata.distgit_key] = metadata

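            # Validate each config file against its schema and register it via the
            # supplied generator, honoring the include list and --wip filtering.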
            def collect_configs(search_type, search_dir, filename_list,
                                include, gen):
                if len(filename_list) == 0:
                    return  # no configs of this type found, bail out

                check_include = len(include) > 0 or self.wip
                with Dir(search_dir):
                    for config_filename in filename_list:
                        is_include = False

                        # loading WIP configs requires a pre-load of the config to check
                        # removing this requirement would require a massive rework of Metadata()
                        # deemed not worth it - AMH 10/9/18
                        is_wip = False
                        if self.wip:
                            full_path = os.path.join(search_dir,
                                                     config_filename)
                            with open(full_path, 'r') as f:
                                cfg_data = yaml.safe_load(f)
                                if cfg_data.get('mode', None) == 'wip':
                                    is_wip = True

                        if not is_wip and check_include:
                            if config_filename in include:
                                is_include = True
                                self.logger.debug("include: " + config_filename)
                                include.remove(config_filename)
                            else:
                                self.logger.debug(
                                    "Skipping {} {} since it is not in the include list"
                                    .format(search_type, config_filename))
                                continue

                        try:
                            schema_path = os.path.join(
                                os.path.dirname(os.path.realpath(__file__)),
                                "schema_{}.yml".format(search_type))
                            c = Core(source_file=config_filename,
                                     schema_files=[schema_path])
                            c.validate(raise_exception=True)

                            gen(search_dir, config_filename, self.disabled
                                or is_include or is_wip)
                        except Exception:
                            self.logger.error(
                                "Configuration file failed to load: {}".format(
                                    os.path.join(search_dir, config_filename)))
                            raise

            if mode in ['images', 'both']:
                collect_configs('image', images_dir, images_filename_list,
                                image_include, gen_ImageMetadata)
                if not self.image_map:
                    self.logger.warning(
                        "No image metadata directories found for given options within: {}"
                        .format(self.group_dir))

            if mode in ['rpms', 'both']:
                collect_configs('rpm', rpms_dir, rpms_filename_list,
                                rpm_include, gen_RPMMetadata)
                if not self.rpm_map:
                    self.logger.warning(
                        "No rpm metadata directories found for given options within: {}"
                        .format(self.group_dir))

        # Make sure that the metadata is not asking us to check out the same exact distgit & branch.
        # This would almost always indicate someone has checked in duplicate metadata into a group.
        no_collide_check = {}
        for meta in list(self.rpm_map.values()) + list(self.image_map.values()):
            key = '{}/{}/#{}'.format(meta.namespace, meta.name, meta.branch())
            if key in no_collide_check:
                raise IOError(
                    'Complete duplicate distgit & branch; something wrong with metadata: {} from {} and {}'
                    .format(key, meta.config_filename,
                            no_collide_check[key].config_filename))
            no_collide_check[key] = meta

        # Read in the streams definition for this group if one exists
        streams_path = os.path.join(self.group_dir, "streams.yml")
        if os.path.isfile(streams_path):
            with open(streams_path, "r") as s:
                self.streams = Model(yaml.safe_load(s.read()))
        if clone_distgits:
            self.clone_distgits()
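
As a small worked example of the include/exclude handling above, the nested `flatten_into_filenames` turns comma- or space-delimited name tuples into de-duplicated .yml filenames. A standalone copy of that logic with hypothetical sample names:

def flatten_into_filenames(names):
    # Turn entries like ('x', 'y,z') into ['x.yml', 'y.yml', 'z.yml'], de-duplicated.
    if not names:
        return []
    result = []
    for n in names:
        result.append(["{}.yml".format(x)
                       for x in n.replace(' ', ',').split(',') if x != ''])
    return list(set([y for x in result for y in x]))


# Hypothetical CLI input: two --images options, one of them comma-delimited.
print(sorted(flatten_into_filenames(('openvswitch', 'cli,node'))))
# -> ['cli.yml', 'node.yml', 'openvswitch.yml']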