Example #1
 def __init__(self, tasker, workflow, remote_sources=None, destdir=IMAGE_BUILD_INFO_DIR):
     """
     :param tasker: ContainerTasker instance
     :param workflow: DockerBuildWorkflow instance
     :param remote_sources: list of dicts, each dict contains info about particular
     remote source with the following keys:
         build_args: dict, extra args for `builder.build_args`, if any
         configs: list of str, configuration files to be injected into
         the exploded remote sources dir
         request_id: int, cachito request id; used to request the
         Image Content Manifest
         url: str, URL from which to download a source archive
         name: str, name of remote source
     :param destdir: image path to carry content_manifests data dir
     """
     super(AddImageContentManifestPlugin, self).__init__(tasker, workflow)
     self.content_manifests_dir = os.path.join(destdir, 'content_manifests')
     self.remote_sources = remote_sources
     self.dfp = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
     labels = Labels(self.dfp.labels)
     _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
     _, image_version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
     _, image_release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
     self.icm_file_name = '{}-{}-{}.json'.format(image_name, image_version, image_release)
     self.content_sets = []
     self._cachito_verify = None
     self._layer_index = None
     self._icm = None
     self._cachito_session = None
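For reference, a minimal sketch of the remote_sources structure described in the docstring above; all values are hypothetical and only illustrate the documented keys:

    # Hypothetical remote_sources argument; the URL, request id and names are
    # illustrative only, not taken from a real Cachito instance.
    remote_sources = [
        {
            'build_args': {'GOFLAGS': '-mod=vendor'},
            'configs': ['app/.npmrc'],
            'request_id': 12345,
            'url': 'https://cachito.example.com/api/v1/requests/12345/download',
            'name': 'gomod-deps',
        },
    ]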
Example #2
 def __init__(self,
              tasker,
              workflow,
              remote_source_icm_url=None,
              destdir=IMAGE_BUILD_INFO_DIR):
     """
     :param tasker: ContainerTasker instance
     :param workflow: DockerBuildWorkflow instance
     :param remote_source_icm_url: str, URL of the ICM from the Cachito request.
     :param destdir: image path to carry content_manifests data dir
     """
     super(AddImageContentManifestPlugin, self).__init__(tasker, workflow)
     self.content_manifests_dir = os.path.join(destdir, 'content_manifests')
     self.icm_url = remote_source_icm_url
     self.dfp = df_parser(self.workflow.builder.df_path,
                          workflow=self.workflow)
     labels = Labels(self.dfp.labels)
     _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
     _, image_version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
     _, image_release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
     self.icm_file_name = '{}-{}-{}.json'.format(image_name, image_version,
                                                 image_release)
     self.content_sets = []
     self._cachito_verify = None
     self._layer_index = None
     self._icm = None
Example #3
    def detect_parent_image_nvr(self, image_name, inspect_data=None):
        """
        Look for the NVR labels, if any, in the image.

        :return NVR string if labels found, otherwise None
        """

        if inspect_data is None:
            inspect_data = self.workflow.builder.parent_image_inspect(
                image_name)
        labels = Labels(inspect_data[INSPECT_CONFIG].get('Labels', {}))

        label_names = [
            Labels.LABEL_TYPE_COMPONENT, Labels.LABEL_TYPE_VERSION,
            Labels.LABEL_TYPE_RELEASE
        ]
        label_values = []

        for lbl_name in label_names:
            try:
                _, lbl_value = labels.get_name_and_value(lbl_name)
                label_values.append(lbl_value)
            except KeyError:
                self.log.info(
                    "Failed to find label '%s' in parent image '%s'.",
                    labels.get_name(lbl_name), image_name)

        if len(label_values) != len(
                label_names):  # don't have all the necessary labels
            self.log.info(
                "Image '%s' NVR missing; not searching for Koji build.",
                image_name)
            return None

        return '-'.join(label_values)
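A minimal illustration of the Labels lookups this method builds on, using hypothetical label values; get_name_and_value() returns a (label name, value) pair and raises KeyError when the label is absent. The import path is an assumption and may differ across osbs-client versions:

    from osbs.utils.labels import Labels  # assumed import path; adjust to your osbs-client version

    # Hypothetical parent-image labels, for illustration only.
    labels = Labels({'com.redhat.component': 'httpd-container',
                     'version': '2.4',
                     'release': '7'})

    _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)  # ('com.redhat.component', 'httpd-container')
    _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)      # ('version', '2.4')
    _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)      # ('release', '7')
    print('-'.join([component, version, release]))                         # httpd-container-2.4-7

    # A missing label raises KeyError, which detect_parent_image_nvr logs and tolerates.
    try:
        Labels({}).get_name_and_value(Labels.LABEL_TYPE_RELEASE)
    except KeyError:
        pass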
Example #4
    def adjust_for_repo_info(self):
        if not self._repo_info:
            logger.warning('repo info not set')
            return

        if not self._repo_info.configuration.is_autorebuild_enabled():
            logger.info(
                'autorebuild is disabled in repo configuration, removing triggers'
            )
            self.template['spec'].pop('triggers', None)

        else:
            labels = Labels(self._repo_info.dockerfile_parser.labels)

            add_timestamp = self._repo_info.configuration.autorebuild.\
                get('add_timestamp_to_release', False)

            if add_timestamp:
                logger.info(
                    'add_timestamp_to_release is enabled for autorebuilds, '
                    'skipping release check in dockerfile')
                return

            try:
                labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
            except KeyError:
                # As expected, release label not set in Dockerfile
                pass
            else:
                raise RuntimeError(
                    'when autorebuild is enabled in repo configuration, '
                    '"release" label must not be set in Dockerfile')
Example #5
    def detect_parent_image_nvr(self, image_name, inspect_data=None):
        """
        Look for the NVR labels, if any, in the image.

        :return NVR string if labels found, otherwise None
        """

        if inspect_data is None:
            inspect_data = self.workflow.builder.parent_image_inspect(image_name)
        labels = Labels(inspect_data[INSPECT_CONFIG].get('Labels', {}))

        label_names = [Labels.LABEL_TYPE_COMPONENT, Labels.LABEL_TYPE_VERSION,
                       Labels.LABEL_TYPE_RELEASE]
        label_values = []

        for lbl_name in label_names:
            try:
                _, lbl_value = labels.get_name_and_value(lbl_name)
                label_values.append(lbl_value)
            except KeyError:
                self.log.info("Failed to find label '%s' in parent image '%s'.",
                              labels.get_name(lbl_name), image_name)

        if len(label_values) != len(label_names):  # don't have all the necessary labels
            self.log.info("Image '%s' NVR missing; not searching for Koji build.", image_name)
            return None

        return '-'.join(label_values)
Example #6
 def get_component_name(self):
     try:
         labels = Labels(self.labels)
         _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
     except KeyError:
         self.log.error('Unable to determine component from "Labels"')
         raise
     return name
Example #7
    def run(self):
        source = get_flatpak_source_info(self.workflow)
        if source is None:
            raise RuntimeError(
                "flatpak_create_dockerfile must be run before flatpak_create_oci"
            )

        self.builder = FlatpakBuilder(source,
                                      self.workflow.source.workdir,
                                      'var/tmp/flatpak-build',
                                      parse_manifest=parse_rpm_output,
                                      flatpak_metadata=self.flatpak_metadata)

        df_labels = df_parser(self.workflow.builder.df_path,
                              workflow=self.workflow).labels
        self.builder.add_labels(df_labels)

        tarred_filesystem, manifest = self._export_filesystem()
        self.log.info('filesystem tarfile written to %s', tarred_filesystem)
        self.log.info('manifest written to %s', manifest)

        image_components = self.builder.get_components(manifest)
        self.workflow.image_components = image_components

        ref_name, outfile, tarred_outfile = self.builder.build_container(
            tarred_filesystem)

        self.log.info('Marking filesystem image "%s" for removal',
                      self.workflow.builder.image_id)
        defer_removal(self.workflow, self.workflow.builder.image_id)

        image_id = self._get_oci_image_id(outfile)
        self.log.info('New OCI image ID is %s', image_id)
        self.workflow.builder.image_id = image_id

        labels = Labels(df_labels)
        _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        _, image_version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, image_release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        name = '{}-{}'.format(self.key, image_name)
        tag = '{}-{}'.format(image_version, image_release)
        # The OCI id is tracked by the builder. The image will be removed in the exit phase
        # No need to mark it for removal after pushing to the local storage
        self._copy_oci_to_local_storage(outfile, name, tag)

        metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)

        self.log.info('OCI image is available as %s', outfile)

        metadata = get_exported_image_metadata(tarred_outfile,
                                               IMAGE_TYPE_OCI_TAR)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)

        self.log.info('OCI tarfile is available as %s', tarred_outfile)
Example #8
    def render_help_file(self, build_dir: BuildDir) -> List[Path]:
        """Update the help.md file in the build directory and use it to generate a man file."""
        dockerfile = build_dir.dockerfile_with_parent_env(
            # platform should not matter, we only care about the component and maintainer labels
            self.workflow.imageutil.base_image_inspect())
        labels = Labels(dockerfile.labels)
        try:
            _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        except KeyError:
            name = ''
        maintainer = dockerfile.labels.get('maintainer', '')

        help_path = build_dir.path / self.help_file

        start_time = get_pipeline_run_start_time(
            self.workflow.osbs, self.workflow.pipeline_run_name)

        with open(help_path, 'r+') as help_file:
            lines = help_file.readlines()

            if not lines[0].startswith("% "):
                lines.insert(0, "%% %s (1) Container Image Pages\n" % name)
                lines.insert(1, "%% %s\n" % maintainer)
                lines.insert(2, "%% %s\n" % start_time.strftime("%B %-d, %Y"))

                help_file.seek(0)
                help_file.truncate()
                help_file.writelines(lines)

                self.log.info(
                    "added metadata to %s for generating nicer manpages",
                    help_path)

        man_path = build_dir.path / self.man_filename

        go_md2man_cmd = ['go-md2man', f'-in={help_path}', f'-out={man_path}']

        try:
            check_output(go_md2man_cmd, stderr=STDOUT)
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise RuntimeError(
                    "Help file is available, but go-md2man is not present in a buildroot"
                ) from e

            raise
        except CalledProcessError as e:
            raise RuntimeError(
                "Error running %s: %s, exit code: %s, output: '%s'" %
                (e.cmd, e, e.returncode, e.output)) from e

        if not man_path.exists():
            raise RuntimeError(
                "go-md2man run complete, but man file is not found")

        # We modified one file and created the other, let's copy both to all per-platform dirs
        return [help_path, man_path]
Example #9
 def icm_file_name(self):
     """Determine the name for the ICM file (name-version-release.json)."""
     # parse Dockerfile for any platform, the N-V-R labels should be equal for all platforms
     dockerfile = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
         self.workflow.imageutil.base_image_inspect())
     labels = Labels(dockerfile.labels)
     _, name = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
     _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
     _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
     return f"{name}-{version}-{release}.json"
Example #10
 def add_release_env_var(self, df_parser):
     release_env_var = self.workflow.source.config.release_env_var
     if release_env_var:
         final_labels = Labels(df_parser.labels)
         try:
             _, final_release = final_labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
             release_line = "ENV {}={}".format(release_env_var, final_release)
             df_parser.add_lines(release_line, at_start=True, all_stages=True)
         except KeyError:
             self.log.warning("environment release variable %s could not be set because no "
                              "release label found", release_env_var)
Example #11
    def has_operator_manifest(self):
        """
        Check if Dockerfile sets the operator manifest label

        :return: bool
        """
        dockerfile = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        labels = Labels(dockerfile.labels)
        try:
            _, operator_label = labels.get_name_and_value(Labels.LABEL_TYPE_OPERATOR_MANIFESTS)
        except KeyError:
            operator_label = 'false'
        return operator_label.lower() == 'true'
Example #12
    def run(self):
        """
        run the plugin
        """

        parser = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)

        try:
            component = dockerfile_labels[component_label]
        except KeyError:
            raise RuntimeError("missing label: {}".format(component_label))

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        try:
            version = dockerfile_labels[version_label]
        except KeyError:
            raise RuntimeError('missing label: {}'.format(version_label))

        try:
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
        except KeyError:
            release = None

        if release:
            if not self.append:
                self.log.debug("release set explicitly so not incrementing")
                if not is_scratch_build():
                    self.check_build_existence_for_explicit_release(component, version, release)
                return

        if self.append:
            next_release = self.get_next_release_append(component, version, release)
        elif is_scratch_build():
            metadata = get_build_json().get("metadata", {})
            next_release = metadata.get("name", "1")
        else:
            next_release = self.get_next_release_standard(component, version)

        # Always set preferred release label - other will be set if old-style
        # label is present
        release_label = labels.LABEL_NAMES[Labels.LABEL_TYPE_RELEASE][0]

        # No release labels are set so set them
        self.log.info("setting %s=%s", release_label, next_release)
        # Write the label back to the file (this is a property setter)
        dockerfile_labels[release_label] = next_release
Example #13
    def _get_image_name_and_repos(self):

        dockerfile = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
            self.workflow.imageutil.base_image_inspect()
        )
        labels = Labels(dockerfile.labels)
        _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)

        stored_data = self.workflow.data.plugins_results.get(StoreMetadataPlugin.key)
        if not stored_data or 'annotations' not in stored_data:
            raise ValueError('Stored Metadata not found')

        repos = []
        if (annotation_repos := stored_data['annotations'].get('repositories')) is None:
            self.log.debug('repositories is not included in annotations.')
Example #14
    def get_component_name(self):
        try:
            labels = Labels(self.labels)
            _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        except KeyError:
            self.log.error('Unable to determine component from "Labels"')
            raise

        organization = get_registries_organization(self.workflow)
        if organization:
            image = ImageName.parse(name)
            image.enclose(organization)
            name = image.get_repo()

        return name
Example #15
    def resolve_docker_image_repo(self, docker_image_repo_fallback):
        # The plugin parameter docker_image_repo is actually a combination
        # of source_registry_uri and name label. Thus, the fallback case must
        # be handled in a non-generic way.
        try:
            source_registry = get_source_registry(self.workflow)
        except KeyError:
            return docker_image_repo_fallback

        registry = source_registry['uri'].docker_uri

        labels = Labels(df_parser(self.workflow.builder.df_path).labels)
        _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)

        return '/'.join([registry, name])
Example #16
    def _update_build(self, build):
        labels = Labels(df_parser(self.workflow.builder.df_path,
                                  workflow=self.workflow).labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source

        build.update({
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
        })
Example #17
 def _nvr_from_dockerfile(self) -> str:
     # any_platform: the N-V-R labels should be equal for all platforms
     dockerfile = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
         self.workflow.imageutil.base_image_inspect())
     labels = Labels(dockerfile.labels)
     try:
         _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
         _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
         _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
     except KeyError as exc:
         raise ValueError(
             "Required name/version/release labels not found in Dockerfile"
         ) from exc
     nvr = f"{name}-{version}-{release}"
     return nvr.replace("/", "-")
Example #18
    def get_component_name(self):
        try:
            labels = Labels(self.labels)
            _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        except KeyError:
            self.log.error('Unable to determine component from "Labels"')
            raise

        organization = self.workflow.conf.registries_organization
        if organization:
            image = ImageName.parse(name)
            image.enclose(organization)
            name = image.get_repo()

        return name
Example #19
    def run(self):
        """
        run the plugin
        """

        parser = df_parser(self.workflow.builder.df_path,
                           workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        try:
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
            if not self.append:
                self.log.debug("release set explicitly so not incrementing")
                return
        except KeyError:
            release = None

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)

        try:
            component = dockerfile_labels[component_label]
        except KeyError:
            raise RuntimeError("missing label: {}".format(component_label))

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        try:
            version = dockerfile_labels[version_label]
        except KeyError:
            raise RuntimeError('missing label: {}'.format(version_label))

        if self.append:
            next_release = self.get_next_release_append(
                component, version, release)
        elif is_scratch_build():
            metadata = get_build_json().get("metadata", {})
            next_release = metadata.get("name", "1")
        else:
            next_release = self.get_next_release_standard(component, version)

        # Always set preferred release label - other will be set if old-style
        # label is present
        release_label = labels.LABEL_NAMES[Labels.LABEL_TYPE_RELEASE][0]

        # No release labels are set so set them
        self.log.info("setting %s=%s", release_label, next_release)
        # Write the label back to the file (this is a property setter)
        dockerfile_labels[release_label] = next_release
Example #20
    def label_version_check(self):
        """Check that Dockerfile version has correct name."""
        msg = "Dockerfile version label can't contain '/' character"
        self.log.debug("Running check: %s", msg)

        # any_platform: the version label should be equal for all platforms
        parser = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
            self.workflow.imageutil.base_image_inspect())
        dockerfile_labels = parser.labels
        labels = Labels(parser.labels)

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        label_version = dockerfile_labels[version_label]

        if '/' in label_version:
            raise ValueError(msg)
Example #21
    def run(self):
        if not self.workflow.built_image_inspect:
            raise RuntimeError("There are no inspect data of built image. "
                               "Have the build succeeded?")
        if "Labels" not in self.workflow.built_image_inspect[INSPECT_CONFIG]:
            raise RuntimeError("No labels specified.")
        labels = Labels(
            self.workflow.built_image_inspect[INSPECT_CONFIG]['Labels'])

        def get_label(labels, label_name):
            try:
                _, value = labels.get_name_and_value(label_name)
                return value
            except KeyError:
                raise RuntimeError("Missing label '%s'." % label_name)

        name = get_label(labels, Labels.LABEL_TYPE_NAME)

        unique_tag = self.workflow.builder.image.tag
        n_unique = "%s:%s" % (name, unique_tag)
        self.workflow.tag_conf.add_unique_image(n_unique)

        if self.unique_tag_only:
            self.log.debug('Skipping transient tags')
            return

        version = get_label(labels, Labels.LABEL_TYPE_VERSION)
        release = get_label(labels, Labels.LABEL_TYPE_RELEASE)

        nvr = "%s:%s-%s" % (name, version, release)
        nv = "%s:%s" % (name, version)
        n = "%s:latest" % name

        self.workflow.tag_conf.add_primary_images([nvr, nv, n])
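To make the tagging above concrete, a small sketch with hypothetical label values and unique tag, showing the image references the plugin would register:

    # Hypothetical values, for illustration only.
    name, version, release = 'rhel8/httpd-24', '2.4', '3'
    unique_tag = 'build-20210101-abcdef'

    n_unique = "%s:%s" % (name, unique_tag)      # rhel8/httpd-24:build-20210101-abcdef (unique image)
    nvr = "%s:%s-%s" % (name, version, release)  # rhel8/httpd-24:2.4-3   (primary)
    nv = "%s:%s" % (name, version)               # rhel8/httpd-24:2.4     (primary)
    n = "%s:latest" % name                       # rhel8/httpd-24:latest  (primary)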
Example #22
    def _get_component_name(self, image):
        """
        Get package for image by querying registry and looking at labels.
        """
        self.log.debug("Querying %s for image labels", image.registry)
        # Do not import get_inspect_for_image directly, needs to be mocked in tests
        inspect = util.get_inspect_for_image(image, image.registry)
        labels = Labels(inspect[INSPECT_CONFIG].get("Labels", {}))

        try:
            _, package = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
            self.log.debug("Resolved package name: %s", package)
        except KeyError:
            raise RuntimeError("Image has no component label: {}".format(image))

        return package
Example #23
    def _get_component_name(self, image):
        """
        Get package for image by querying registry and looking at labels.
        """
        self.log.debug("Querying %s for image labels", image.registry)
        registry_client = self._get_registry_client(image.registry)
        inspect = registry_client.get_inspect_for_image(image)
        labels = Labels(inspect[INSPECT_CONFIG].get("Labels", {}))

        try:
            _, package = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
            self.log.debug("Resolved package name: %s", package)
        except KeyError as exc:
            raise RuntimeError("Image has no component label: {}".format(image)) from exc

        return package
Example #24
    def __init__(self,
                 tasker,
                 workflow,
                 nvr=None,
                 destdir=IMAGE_BUILD_INFO_DIR,
                 use_final_dockerfile=False):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param nvr: name-version-release, will be appended to Dockerfile-.
                    If not specified, try to get it from Name, Version, Release labels.
        :param destdir: directory in the image to put Dockerfile-N-V-R into
        :param use_final_dockerfile: bool, when set to True, uses final version of processed
                                     dockerfile,
                                     when set to False, uses Dockerfile from time when this plugin
                                     was executed
        """
        # call parent constructor
        super(AddDockerfilePlugin, self).__init__(tasker, workflow)

        self.use_final_dockerfile = use_final_dockerfile

        if nvr is None:
            labels = Labels(df_parser(self.workflow.builder.df_path).labels)
            try:
                _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
                _, version = labels.get_name_and_value(
                    Labels.LABEL_TYPE_VERSION)
                _, release = labels.get_name_and_value(
                    Labels.LABEL_TYPE_RELEASE)
            except KeyError as exc:
                raise ValueError(
                    "You have to specify either nvr arg or name/version/release labels."
                ) from exc
            nvr = "{0}-{1}-{2}".format(name, version, release)
            nvr = nvr.replace("/", "-")
        self.df_name = '{0}-{1}'.format(DOCKERFILE_FILENAME, nvr)
        self.df_dir = destdir
        self.df_path = os.path.join(self.df_dir, self.df_name)

        # we are not using final dockerfile, so let's copy current snapshot
        if not self.use_final_dockerfile:
            local_df_path = os.path.join(self.workflow.builder.df_dir,
                                         self.df_name)
            shutil.copy2(self.workflow.builder.df_path, local_df_path)
Example #25
    def _update_build(self, build):
        # any_platform: the N-V-R labels should be equal for all platforms
        dockerfile = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
            self.workflow.imageutil.base_image_inspect())
        labels = Labels(dockerfile.labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source

        build.update({
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
        })
Example #26
    def __init__(self,
                 workflow,
                 labels=None,
                 dont_overwrite=None,
                 auto_labels=("build-date", "architecture", "vcs-type",
                              "vcs-ref"),
                 aliases=None,
                 dont_overwrite_if_in_dockerfile=("distribution-scope",
                                                  "com.redhat.license_terms")):
        """
        constructor

        :param workflow: DockerBuildWorkflow instance
        :param labels: dict, key value pairs to set as labels; or str, JSON-encoded dict
        :param dont_overwrite: iterable, list of label keys which should not be overwritten
                               if they are present in parent image
        :param auto_labels: iterable, list of labels to be determined automatically, if supported
                            it should contain only new label names and not old label names,
                            as they will be managed automatically
        :param aliases: dict, maps old label names to new label names - for each old name found in
                        base image, dockerfile, or labels argument, a label with the new name is
                        added (with the same value)
        :param dont_overwrite_if_in_dockerfile: iterable, list of label keys which should not be
                                                 overwritten if they are present in dockerfile
        """
        # call parent constructor
        super(AddLabelsPlugin, self).__init__(workflow)

        if isinstance(labels, str):
            labels = json.loads(labels)
        if labels and not isinstance(labels, dict):
            raise RuntimeError("labels have to be dict")

        # see if REACTOR_CONFIG has any labels. If so, merge them with the existing argument
        # and otherwise use the existing argument
        image_labels = self.workflow.conf.image_labels

        # validity of image_labels is enforced by REACTOR_CONFIG's schema, so no need to check
        if image_labels:
            if labels:
                labels.update(image_labels)
            else:
                labels = image_labels

        self.labels = labels or {}

        self.dont_overwrite = dont_overwrite or ()
        self.dont_overwrite_if_in_dockerfile = dont_overwrite_if_in_dockerfile
        self.aliases = aliases or Labels.get_new_names_by_old()
        self.auto_labels = auto_labels or ()
        self.info_url_format = self.workflow.conf.image_label_info_url_format

        self.equal_labels = self.workflow.conf.image_equal_labels
        if not isinstance(self.equal_labels, list):
            raise RuntimeError("equal_labels have to be list")
Example #27
    def update_config_from_dockerfile(self, config):
        """Updates build config with values from the Dockerfile

        Updates:
          * set "name" from LABEL com.redhat.component (if exists)
          * set "version" from LABEL version (if exists)

        :param config: ConfigParser object
        """
        labels = Labels(df_parser(self.workflow.builder.df_path).labels)
        for config_key, label in (
            ('name', Labels.LABEL_TYPE_COMPONENT),
            ('version', Labels.LABEL_TYPE_VERSION),
        ):
            try:
                _, value = labels.get_name_and_value(label)
            except KeyError:
                pass
            else:
                config.set('image-build', config_key, value)
Example #28
    def update_config_from_dockerfile(self, config):
        """Updates build config with values from the Dockerfile

        Updates:
          * set "name" from LABEL com.redhat.component (if exists)
          * set "version" from LABEL version (if exists)

        :param config: ConfigParser object
        """
        labels = Labels(self.workflow.build_dir.any_platform.dockerfile.labels)
        for config_key, label in (
            ('name', Labels.LABEL_TYPE_COMPONENT),
            ('version', Labels.LABEL_TYPE_VERSION),
        ):
            try:
                _, value = labels.get_name_and_value(label)
            except KeyError:
                pass
            else:
                config.set('image-build', config_key, value)
Example #29
    def _get_image_name_and_repos(self):

        repos = []
        dockerfile = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        labels = Labels(dockerfile.labels)
        _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)

        stored_data = self.workflow.exit_results.get(StoreMetadataInOSv3Plugin.key)
        if not stored_data or 'annotations' not in stored_data:
            raise ValueError('Stored Metadata not found')

        repo_data = json.loads(stored_data['annotations']['repositories'])

        repos.extend(repo_data.get('unique', []))
        repos.extend(repo_data.get('primary', []))
        repos.extend(repo_data.get('floating', []))

        if repos:
            image_name_obj = ImageName.parse(repos[0])
            image_name = image_name_obj.get_repo()

        return (image_name, repos)
Example #30
    def __init__(self, tasker, workflow, nvr=None, destdir="/root/buildinfo/",
                 use_final_dockerfile=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param nvr: name-version-release, will be appended to Dockerfile-.
                    If not specified, try to get it from Name, Version, Release labels.
        :param destdir: directory in the image to put Dockerfile-N-V-R into
        :param use_final_dockerfile: bool, when set to True, uses final version of processed
                                     dockerfile,
                                     when set to False, uses Dockerfile from time when this plugin
                                     was executed
        """
        # call parent constructor
        super(AddDockerfilePlugin, self).__init__(tasker, workflow)

        self.use_final_dockerfile = use_final_dockerfile

        if nvr is None:
            labels = Labels(df_parser(self.workflow.builder.df_path).labels)
            try:
                _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
                _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
                _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
            except KeyError:
                raise ValueError("You have to specify either nvr arg or name/version/release "
                                 "labels.")
            nvr = "{0}-{1}-{2}".format(name, version, release)
            nvr = nvr.replace("/", "-")
        self.df_name = '{0}-{1}'.format(DOCKERFILE_FILENAME, nvr)
        self.df_dir = destdir
        self.df_path = os.path.join(self.df_dir, self.df_name)

        # we are not using final dockerfile, so let's copy current snapshot
        if not self.use_final_dockerfile:
            local_df_path = os.path.join(self.workflow.builder.df_dir, self.df_name)
            shutil.copy2(self.workflow.builder.df_path, local_df_path)
Example #31
    def _get_image_name_and_repos(self):

        repos = []
        dockerfile = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        labels = Labels(dockerfile.labels)
        _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)

        stored_data = self.workflow.exit_results.get(StoreMetadataInOSv3Plugin.key)
        if not stored_data or 'annotations' not in stored_data:
            raise ValueError('Stored Metadata not found')

        repo_data = json.loads(stored_data['annotations']['repositories'])

        repos.extend(repo_data.get('unique', []))
        repos.extend(repo_data.get('primary', []))
        repos.extend(repo_data.get('floating', []))

        if repos:
            image_name_obj = ImageName.parse(repos[0])
            image_name = image_name_obj.get_repo()

        return (image_name, repos)
Example #32
    def _get_nvr(self, dockerfile_labels) -> Tuple[str, str, Optional[str]]:
        """Get the component, version and release labels from the Dockerfile."""
        labels = Labels(dockerfile_labels)

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)
        component: Optional[str] = dockerfile_labels.get(component_label)

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        version: Optional[str] = dockerfile_labels.get(version_label)

        release_label = labels.get_name(Labels.LABEL_TYPE_RELEASE)
        release: Optional[str] = dockerfile_labels.get(release_label)

        missing_labels = {}

        # component, version: must be present and not empty
        for label, value in (component_label, component), (version_label, version):
            if value is None:
                self.log.error("missing label: %s", label)
                missing_labels[label] = "missing"
            elif not value:
                self.log.error("empty label: %s", label)
                missing_labels[label] = "empty"

        # release: if present, must not be empty
        if (release is not None) and not release:
            self.log.error("empty label: %s", release_label)
            missing_labels[release_label] = "empty"

        if missing_labels:
            raise RuntimeError(
                "Required labels are missing or empty or using undefined variables: {}"
                .format(missing_labels)
            )

        # For type-checkers: narrow the type of component and version to str
        assert component is not None and version is not None

        return component, version, release
Example #33
    def _skip_all(self):
        skip_all = self.user_config.get("skip_all", False)

        if not skip_all:
            return False

        site_config = get_operator_manifests(self.workflow)
        allowed_packages = site_config.get("skip_all_allow_list", [])

        parser = df_parser(self.workflow.builder.df_path,
                           workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)
        component = dockerfile_labels[component_label]

        if component in allowed_packages:
            return True
        else:
            raise RuntimeError(
                "Koji package: {} isn't allowed to use skip_all for operator "
                "bundles".format(component))
Example #34
    def detect_parent_image_nvr(self):
        config = self.workflow.base_image_inspect[INSPECT_CONFIG]
        labels = Labels(config['Labels'] or {})

        label_names = [Labels.LABEL_TYPE_COMPONENT, Labels.LABEL_TYPE_VERSION,
                       Labels.LABEL_TYPE_RELEASE]
        label_values = []

        for lbl_name in label_names:
            try:
                _, lbl_value = labels.get_name_and_value(lbl_name)
                label_values.append(lbl_value)
            except KeyError:
                self.log.info("Failed to find label '%s' in parent image.",
                              labels.get_name(lbl_name))

        if len(label_values) != len(label_names):
            self._parent_image_nvr = None
            self.log.info("Not waiting for Koji build.")
            return False

        self._parent_image_nvr = '-'.join(label_values)
        return True
Example #35
def test_labels(labels, fnc, expect):
    label = Labels(labels)

    fn, arg = fnc
    if isinstance(expect, type):
        with pytest.raises(expect):
            if arg is not None:
                assert getattr(label, fn)(arg) == expect
            else:
                assert getattr(label, fn)() == expect
    else:
        if arg is not None:
            assert getattr(label, fn)(arg) == expect
        else:
            assert getattr(label, fn)() == expect
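A hedged sketch of how this test could be parametrized; the cases are illustrative, assume the standard label-name mapping (LABEL_TYPE_NAME resolving to 'name'), and are not the project's actual test data. The import path is also an assumption:

    import pytest
    from osbs.utils.labels import Labels  # assumed import path; adjust to your osbs-client version

    @pytest.mark.parametrize('labels, fnc, expect', [
        # present label: get_name_and_value returns a (label name, value) pair
        ({'name': 'foo'}, ('get_name_and_value', Labels.LABEL_TYPE_NAME), ('name', 'foo')),
        # missing label: get_name_and_value raises KeyError
        ({}, ('get_name_and_value', Labels.LABEL_TYPE_NAME), KeyError),
        # no-argument call path (arg is None)
        ({}, ('get_new_names_by_old', None), Labels.get_new_names_by_old()),
    ])
    def test_labels(labels, fnc, expect):  # body as defined above
        ...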
Example #36
    def _skip_all(self):
        skip_all = self.user_config.get("skip_all", False)

        if not skip_all:
            return False

        site_config = self.workflow.conf.operator_manifests
        allowed_packages = site_config.get("skip_all_allow_list", [])

        # any_platform: the component label should be equal for all platforms
        parser = self.workflow.build_dir.any_platform.dockerfile_with_parent_env(
            self.workflow.imageutil.base_image_inspect()
        )
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)
        component = dockerfile_labels[component_label]

        if component in allowed_packages:
            return True
        else:
            raise RuntimeError("Koji package: {} isn't allowed to use skip_all for operator "
                               "bundles".format(component))
Example #37
    def __init__(self,
                 tasker,
                 workflow,
                 labels,
                 dont_overwrite=None,
                 auto_labels=("build-date", "architecture", "vcs-type",
                              "vcs-ref", "com.redhat.build-host"),
                 aliases=None,
                 dont_overwrite_if_in_dockerfile=("distribution-scope", ),
                 info_url_format=None,
                 equal_labels=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param labels: dict, key value pairs to set as labels; or str, JSON-encoded dict
        :param dont_overwrite: iterable, list of label keys which should not be overwritten
                               if they are present in parent image
        :param auto_labels: iterable, list of labels to be determined automatically, if supported
                            it should contain only new label names and not old label names,
                            as they will be managed automatically
        :param aliases: dict, maps old label names to new label names - for each old name found in
                        base image, dockerfile, or labels argument, a label with the new name is
                        added (with the same value)
        :param dont_overwrite_if_in_dockerfile: iterable, list of label keys which should not be
                                                 overwritten if they are present in dockerfile
        :param info_url_format: string, format for url dockerfile label
        :param equal_labels: list, with equal labels groups as lists
        """
        # call parent constructor
        super(AddLabelsPlugin, self).__init__(tasker, workflow)
        if isinstance(labels, str):
            labels = json.loads(labels)
        if not isinstance(labels, dict):
            raise RuntimeError("labels have to be dict")
        self.labels = labels
        self.dont_overwrite = dont_overwrite or ()
        self.dont_overwrite_if_in_dockerfile = dont_overwrite_if_in_dockerfile
        self.aliases = aliases or Labels.get_new_names_by_old()
        self.auto_labels = auto_labels or ()
        self.info_url_format = info_url_format
        self.equal_labels = equal_labels or []
        if not isinstance(self.equal_labels, list):
            raise RuntimeError("equal_labels have to be list")
Example #38
    def __init__(self, tasker, workflow, labels, dont_overwrite=None,
                 auto_labels=("build-date",
                              "architecture",
                              "vcs-type",
                              "vcs-ref",
                              "com.redhat.build-host"),
                 aliases=None,
                 dont_overwrite_if_in_dockerfile=("distribution-scope",),
                 info_url_format=None,
                 equal_labels=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param labels: dict, key value pairs to set as labels; or str, JSON-encoded dict
        :param dont_overwrite: iterable, list of label keys which should not be overwritten
                               if they are present in parent image
        :param auto_labels: iterable, list of labels to be determined automatically, if supported
                            it should contain only new label names and not old label names,
                            as they will be managed automatically
        :param aliases: dict, maps old label names to new label names - for each old name found in
                        base image, dockerfile, or labels argument, a label with the new name is
                        added (with the same value)
        :param dont_overwrite_if_in_dockerfile: iterable, list of label keys which should not be
                                                 overwritten if they are present in dockerfile
        :param info_url_format: string, format for url dockerfile label
        :param equal_labels: list, with equal labels groups as lists
        """
        # call parent constructor
        super(AddLabelsPlugin, self).__init__(tasker, workflow)
        if isinstance(labels, str):
            labels = json.loads(labels)
        if not isinstance(labels, dict):
            raise RuntimeError("labels have to be dict")
        self.labels = labels
        self.dont_overwrite = dont_overwrite or ()
        self.dont_overwrite_if_in_dockerfile = dont_overwrite_if_in_dockerfile
        self.aliases = aliases or Labels.get_new_names_by_old()
        self.auto_labels = auto_labels or ()
        self.info_url_format = info_url_format
        self.equal_labels = equal_labels or []
        if not isinstance(self.equal_labels, list):
            raise RuntimeError("equal_labels have to be list")
Example #39
 def get_release(self):
     labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)
     _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
     return release
Example #40
    def run(self):
        """
        run the plugin

        The plugin returns None if exception occurred,
        self.NO_HELP_FILE_FOUND if no help found
        or self.HELP_GENERATED if help man page was generated
        """

        result = {
            'help_file': self.help_file,
            'status': None
        }

        help_path = os.path.join(self.workflow.builder.df_dir, self.help_file)

        if not os.path.exists(help_path):
            self.log.info("File %s not found", help_path)
            result['status'] = self.NO_HELP_FILE_FOUND
            return result

        dockerfile = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        labels = Labels(dockerfile.labels)
        try:
            _, name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        except KeyError:
            name = ''
        maintainer = dockerfile.labels.get('maintainer', '')

        with open(help_path, 'r+') as help_file:
            lines = help_file.readlines()

            if not lines[0].startswith("% "):
                lines.insert(0, "%% %s (1) Container Image Pages\n" % name)
                lines.insert(1, "%% %s\n" % maintainer)
                lines.insert(2, "%% %s\n" % dt.fromtimestamp(atomic_reactor_start_time)
                             .strftime(format="%B %-d, %Y"))

                help_file.seek(0)
                help_file.truncate()
                help_file.writelines(lines)

                self.log.info("added metadata to %s for generating nicer manpages", help_path)

        man_path = os.path.join(self.workflow.builder.df_dir, self.man_filename)

        go_md2man_cmd = ['go-md2man', '-in={}'.format(help_path), '-out={}'.format(man_path)]

        try:
            check_output(go_md2man_cmd, stderr=STDOUT)
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise RuntimeError(
                    "Help file is available, but go-md2man is not present in a buildroot")

            raise
        except CalledProcessError as e:
            raise RuntimeError("Error running %s: %r, exit code: %s, output: '%s'" % (
                e.cmd, e, e.returncode, e.output))

        if not os.path.exists(man_path):
            raise RuntimeError("go-md2man run complete, but man file is not found")

        # Include the help file in the docker file

        lines = dockerfile.lines

        content = 'ADD {0} /{0}'.format(self.man_filename)
        # put it before last instruction
        lines.insert(-1, content + '\n')

        dockerfile.lines = lines

        self.log.info("added %s", man_path)

        result['status'] = self.HELP_GENERATED
        return result
Example #41
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)

        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {
            'image': {'autorebuild': is_rebuild(self.workflow)},
            'submitter': self.koji_session.getLoggedInUser().get('name'),
        }

        koji_task_owner = None
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = koji_task_id = int(koji_task_id)
                koji_task_owner = get_koji_task_owner(self.koji_session, koji_task_id).get('name')
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(set(pulp_pull_results)))

        # append parent builds and parent_build_id from koji parent
        extra['image'].update(get_parent_image_koji_data(self.workflow))

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
Example #42
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        extra['image'].update(get_parent_image_koji_data(self.workflow))

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')
        extra['submitter'] = self.session.getLoggedInUser()['name']

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }
        if self.workflow.all_yum_repourls:
            extra['image']['yum_repourls'] = self.workflow.all_yum_repourls

        self.set_help(extra, worker_metadatas)
        self.set_media_types(extra, worker_metadatas)
        self.set_go_metadata(extra)
        self.set_operators_metadata(extra, worker_metadatas)
        self.remove_unavailable_manifest_digests(worker_metadatas)
        self.set_group_manifest_info(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        return build