Code example #1
    def get_output(self, worker_metadatas, buildroot_id):
        """
        Build the output entry of the metadata.

        :return: list, containing dicts of partial metadata
        """
        outputs = []
        output_file = None

        if self.source_build:

            registry = self.workflow.push_conf.docker_registries[0]

            build_name = get_unique_images(self.workflow)[0]
            pullspec = copy.deepcopy(build_name)
            pullspec.registry = registry.uri

            outputs, output_file = get_output(workflow=self.workflow, buildroot_id=buildroot_id,
                                              pullspec=pullspec, platform=os.uname()[4],
                                              source_build=True, logs=None)

        else:
            for platform in worker_metadatas:
                for instance in worker_metadatas[platform]['output']:
                    instance['buildroot_id'] = '{}-{}'.format(platform, instance['buildroot_id'])
                    outputs.append(instance)

        return outputs, output_file
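
A minimal standalone sketch of the buildroot_id prefixing performed in the non-source branch above, using hypothetical plain dicts in place of the real worker metadata (no atomic-reactor objects involved):

# Hypothetical worker metadata, shaped like the structure the plugin iterates over.
worker_metadatas = {
    'x86_64': {'output': [{'buildroot_id': 1, 'type': 'docker-image'}]},
    'ppc64le': {'output': [{'buildroot_id': 1, 'type': 'docker-image'}]},
}

outputs = []
for platform in worker_metadatas:
    for instance in worker_metadatas[platform]['output']:
        # Prefix each per-worker buildroot_id with its platform, e.g. 1 -> 'x86_64-1',
        # so entries from different workers cannot collide in the combined list.
        instance['buildroot_id'] = '{}-{}'.format(platform, instance['buildroot_id'])
        outputs.append(instance)

assert [o['buildroot_id'] for o in outputs] == ['x86_64-1', 'ppc64le-1']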
Code example #2
    def _get_build_metadata(self, platform: str):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        pullspec_image = self._determine_image_pullspec(platform)
        buildroot = get_buildroot(platform)
        output_files, _ = get_output(workflow=self.workflow, buildroot_id=buildroot['id'],
                                     pullspec=pullspec_image, platform=platform,
                                     source_build=False)
        if build_log_output := self._generate_build_log_output(platform, buildroot['id']):
            output_files.append(build_log_output)
Code example #3
    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # a dash at the first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        buildroot = get_buildroot(build_id=self.build_id,
                                  tasker=self.tasker,
                                  osbs=self.osbs,
                                  rpms=True)
        output_files, _ = get_output(workflow=self.workflow,
                                     buildroot_id=buildroot['id'],
                                     pullspec=self.pullspec_image,
                                     platform=self.platform,
                                     source_build=False,
                                     logs=self.get_logs())

        output = [output.metadata for output in output_files]
        koji_metadata = {
            'metadata_version': metadata_version,
            'buildroots': [buildroot],
            'output': output,
        }
        self.update_buildroot_koji(buildroot, output)

        return koji_metadata, output_files
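
The pullspec selection in get_metadata() above prefers a primary image whose tag contains an interior dash (a release-style tag such as '1-2') over the first unique image. A standalone sketch of that tag check, with hypothetical tag strings:

def has_interior_dash(tag):
    # Mirrors the `'-' in image.tag[1:-1]` test above: a dash at the very
    # first or last character does not count.
    return '-' in tag[1:-1]

assert has_interior_dash('1-2') is True       # release-style tag: preferred
assert has_interior_dash('-latest') is False  # leading dash ignored
assert has_interior_dash('latest-') is False  # trailing dash ignored
assert has_interior_dash('latest') is False   # no dash at all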
Code example #4
def test_binary_build_get_output(no_v2_digest: bool, from_scratch: bool,
                                 is_flatpak: bool,
                                 workflow: DockerBuildWorkflow, tmpdir):
    platform = "x86_64"
    buildroot_id = f'{platform}-1'
    image_pullspec = ImageName.parse("ns/image:latest")
    expected_components = []

    if not from_scratch:
        package_list = [
            'python-docker-py;1.3.1;1.fc24;noarch;(none);191456;'
            '7c1f60d8cde73e97a45e0c489f4a3b26;1438058212;(none);(none);(none);(none)',
            'fedora-repos-rawhide;24;0.1;noarch;(none);2149;'
            'd41df1e059544d906363605d47477e60;1436940126;(none);(none);(none);(none)',
            'gpg-pubkey-doc;1.0;1;noarch;(none);1000;'
            '00000000000000000000000000000000;1436940126;(none);(none);(none);(none)'
        ]
        expected_components = parse_rpm_output(package_list)

    if is_flatpak:
        workflow.user_params['flatpak'] = True

    workflow.data.image_components = {platform: expected_components}

    if from_scratch:
        workflow.data.dockerfile_images = DockerfileImages(['scratch'])
        parent_id = None
    else:
        workflow.data.dockerfile_images = DockerfileImages(['fedora:35'])
        parent_id = 'parent-id'
        (flexmock(workflow.imageutil).should_receive(
            'base_image_inspect').with_args(platform).and_return(
                {'Id': parent_id}))

    # For verifying the tags in final metadata
    workflow.data.tag_conf.add_unique_image("ns/image:1")
    workflow.data.tag_conf.add_unique_image("ns/image:2")
    # This primary image is noise; for a binary build, it should not be
    # included in the metadata.
    workflow.data.tag_conf.add_primary_image("ns/image:1-2")

    # Mock ImageUtil.get_uncompressed_image_layer_sizes
    layer_sizes = [
        {
            "diff_id": 1,
            "size": 100
        },
        {
            "diff_id": 2,
            "size": 200
        },
    ]
    workflow.build_dir.init_build_dirs([platform], workflow.source)
    platform_dir = workflow.build_dir.platform_dir(platform)
    (flexmock(workflow.imageutil).should_receive(
        'get_uncompressed_image_layer_sizes').with_args(
            str(platform_dir.exported_squashed_image)).and_return(layer_sizes))

    workflow.conf.conf = {
        'registries': [
            {
                'url': 'https://registry.host/',
                'insecure': False
            },
        ],
    }

    # Mock get_inspect_for_image
    image_id = 'image-id-1234'
    (flexmock(
        workflow.imageutil).should_receive('get_inspect_for_image').with_args(
            image_pullspec, platform=platform).and_return({'Id': image_id}))

    # Mock get manifest digests
    image_manifest_digest = ManifestDigest(
        {'oci': 'oci-1234'} if no_v2_digest else {'v2': '1234'})
    (flexmock(RegistryClient).should_receive(
        'get_manifest_digests').and_return(image_manifest_digest))
    # Mock getting image config
    blob_config = {'oci': 'oci-1234'} if no_v2_digest else {'v2': '1234'}
    (flexmock(RegistryClient).should_receive(
        'get_config_and_id_from_registry').and_return((blob_config, None)))

    # Assume FetchDockerArchivePlugin has run and metadata of the
    # platform-specific built image archive has been saved.
    workflow.data.plugins_results[FetchDockerArchivePlugin.key] = {
        platform: {
            'type': IMAGE_TYPE_DOCKER_ARCHIVE
        }
    }

    output, output_file = get_output(workflow,
                                     buildroot_id,
                                     image_pullspec,
                                     platform,
                                     source_build=False)

    # Prepare expected metadata

    expected_repositories = sorted([
        # Pull image with a specific tag
        image_pullspec.to_str(),
        # Pull image with a specific digest
        f'{image_pullspec.to_str(tag=False)}@{image_manifest_digest.oci}'
        if no_v2_digest else
        f'{image_pullspec.to_str(tag=False)}@{image_manifest_digest.v2}',
    ])
    per_platform_image_tags = sorted(
        image.tag
        for image in workflow.data.tag_conf.get_unique_images_with_platform(
            platform))
    expected_metadata: Dict[str, Any] = {
        'buildroot_id': buildroot_id,
        'checksum_type': 'md5',
        'arch': platform,
        'type': 'docker-image',
        'components': expected_components,
        'extra': {
            'image': {
                'arch': platform
            },
            'docker': {
                'id': image_id,
                'repositories': expected_repositories,
                'layer_sizes': layer_sizes,
                'tags': per_platform_image_tags,
                'config': blob_config,
                'digests': None,  # Set later below
            },
        },
    }

    extra_docker = expected_metadata['extra']['docker']
    if not from_scratch:
        extra_docker['parent_id'] = parent_id

    extra_docker['digests'] = (
        {
            ManifestDigest.content_type['oci']: image_manifest_digest.oci
        } if no_v2_digest else {
            ManifestDigest.content_type['v2']: image_manifest_digest.v2
        })

    # Start assertions
    assert output_file is None
    assert len(output) == 1

    image_metadata = output[0].metadata

    # Assert these image metadata fields first, then remove them and assert
    # the rest, so nothing needs to be mocked for get_image_output.
    assert f'docker-image-{image_id}.x86_64.tar.gz' == image_metadata.pop(
        'filename')
    assert image_metadata.pop('filesize') > 0
    assert re.match(r'^[0-9a-f]+$', image_metadata.pop('checksum'))

    # Make the comparison below easier
    extra_docker = image_metadata['extra']['docker']
    extra_docker['repositories'] = sorted(extra_docker['repositories'])

    assert expected_metadata == image_metadata
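
For reference, the two entries in expected_repositories above are simply a by-tag pullspec and a by-digest pullspec for the same repository. A minimal sketch with hypothetical values, using plain string formatting instead of ImageName and ManifestDigest:

# Hypothetical repository, tag, and digest values.
repo = 'ns/image'
tag = 'latest'
digest = 'sha256:1234'

tag_pullspec = f'{repo}:{tag}'        # pull the image by tag
digest_pullspec = f'{repo}@{digest}'  # pull the image by manifest digest

assert sorted([tag_pullspec, digest_pullspec]) == [
    'ns/image:latest',
    'ns/image@sha256:1234',
]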