def _store_manifest_digest(self, image, use_original_tag):
        """Store media type and digest for manifest list or v2 schema 2 manifest digest"""
        image_str = image.to_str()
        manifest_list = self._get_manifest_list(image)
        reg_client = self._get_registry_client(image.registry)
        if manifest_list:
            digest_dict = get_checksums(BytesIO(manifest_list.content),
                                        ['sha256'])
            media_type = get_manifest_media_type('v2_list')
        else:
            digests_dict = reg_client.get_all_manifests(image,
                                                        versions=('v2', ))
            media_type = get_manifest_media_type('v2')
            try:
                manifest_digest_response = digests_dict['v2']
            except KeyError as exc:
                raise RuntimeError(
                    'Unable to fetch manifest list or '
                    'v2 schema 2 digest for {} (Does image exist?)'.format(
                        image_str)) from exc

            digest_dict = get_checksums(
                BytesIO(manifest_digest_response.content), ['sha256'])

        manifest_digest = 'sha256:{}'.format(digest_dict['sha256sum'])
        parent_digests = {media_type: manifest_digest}
        if use_original_tag:
            # image tag may have been replaced with a ref for autorebuild; use original tag
            # to simplify fetching parent_images_digests data in other plugins
            image = image.copy()
            image.tag = self.workflow.builder.dockerfile_images.base_image_key.tag
            image_str = image.to_str()

        self.workflow.builder.parent_images_digests[image_str] = parent_digests
Example #2
    def _fetch_manifest_digest(self, image: ImageName) -> Dict[str, str]:
        """Fetch media type and digest for manifest list or v2 schema 2 manifest digest"""
        image_str = image.to_str()
        manifest_list = self._get_manifest_list(image)
        reg_client = self._get_registry_client(image.registry)
        if manifest_list:
            digest_dict = get_checksums(BytesIO(manifest_list.content),
                                        ['sha256'])
            media_type = get_manifest_media_type('v2_list')
        else:
            digests_dict = reg_client.get_all_manifests(image,
                                                        versions=('v2', ))
            media_type = get_manifest_media_type('v2')
            try:
                manifest_digest_response = digests_dict['v2']
            except KeyError as exc:
                raise RuntimeError(
                    'Unable to fetch manifest list or '
                    'v2 schema 2 digest for {} (Does image exist?)'.format(
                        image_str)) from exc

            digest_dict = get_checksums(
                BytesIO(manifest_digest_response.content), ['sha256'])

        manifest_digest = 'sha256:{}'.format(digest_dict['sha256sum'])
        parent_digest = {media_type: manifest_digest}
        return parent_digest
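Both helpers above follow the same pattern: hash the raw manifest bytes with get_checksums and prefix the algorithm name. A minimal standalone sketch of that pattern, assuming only get_checksums and get_manifest_media_type from atomic_reactor.util (the helper name itself is hypothetical):

from io import BytesIO
from typing import Dict

from atomic_reactor.util import get_checksums, get_manifest_media_type


def manifest_digest_entry(raw_manifest: bytes, version: str = 'v2') -> Dict[str, str]:
    # Hypothetical helper, not part of atomic-reactor: map the manifest's media
    # type to its 'sha256:<hexdigest>' string, as both methods above do.
    checksums = get_checksums(BytesIO(raw_manifest), ['sha256'])
    return {get_manifest_media_type(version): 'sha256:{}'.format(checksums['sha256sum'])}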
Example #3
def test_get_hexdigests(tmpdir, content, algorithms, expected):
    with tempfile.NamedTemporaryFile(dir=str(tmpdir)) as tmpfile:
        tmpfile.write(content)
        tmpfile.flush()

        checksums = get_checksums(tmpfile.name, algorithms)
        assert checksums == expected
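The parameters of test_get_hexdigests come from external parametrization that is not shown here. A self-contained sketch with hypothetical values (standard "abc" test vectors; the project's real fixtures may differ) illustrates that get_checksums keys each result as '<algorithm>sum':

import tempfile

import pytest

from atomic_reactor.util import get_checksums


@pytest.mark.parametrize(('content', 'algorithms', 'expected'), [
    # RFC 1321 / FIPS 180-4 "abc" test vectors
    (b'abc', ['md5'], {'md5sum': '900150983cd24fb0d6963f7d28e17f72'}),
    (b'abc', ['sha256'],
     {'sha256sum': 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'}),
])
def test_get_hexdigests_example(tmpdir, content, algorithms, expected):
    with tempfile.NamedTemporaryFile(dir=str(tmpdir)) as tmpfile:
        tmpfile.write(content)
        tmpfile.flush()
        assert get_checksums(tmpfile.name, algorithms) == expected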
Example #4
    def test_manifest_list_doesnt_have_current_platform(
            self, caplog, user_params):
        manifest_list = {
            'manifests': [
                {
                    'platform': {
                        'architecture': 'ppc64le'
                    },
                    'digest': 'sha256:654321'
                },
            ]
        }
        manifest_list_digest = get_checksums(
            BytesIO(json.dumps(manifest_list).encode('utf-8')),
            ['sha256'])['sha256sum']

        def workflow_callback(workflow):
            workflow = self.prepare(workflow, mock_get_manifest_list=False)
            workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = {
                'ppc64le'
            }
            release = 'rel1'
            version = 'ver1'
            config_blob = {
                'config': {
                    'Labels': {
                        'release': release,
                        'version': version
                    }
                }
            }
            (flexmock(atomic_reactor.util.RegistryClient).should_receive(
                'get_config_from_registry').and_return(config_blob).times(0))

            manifest_tag = SOURCE_REGISTRY + '/' + BASE_IMAGE_W_SHA
            base_image_result = ImageName.parse(manifest_tag)
            manifest_image = base_image_result.copy()

            (flexmock(atomic_reactor.util.RegistryClient).should_receive(
                'get_manifest_list').with_args(manifest_image).and_return(
                    flexmock(json=lambda: manifest_list,
                             content=json.dumps(manifest_list).encode(
                                 'utf-8'))).once())
            return workflow

        test_pull_base_image_plugin(user_params,
                                    SOURCE_REGISTRY,
                                    BASE_IMAGE_W_SHA, [], [],
                                    inspect_only=False,
                                    workflow_callback=workflow_callback,
                                    check_platforms=True)
        new_image = "'{}/busybox@sha256:{}'".format(SOURCE_REGISTRY,
                                                    manifest_list_digest)
        pulling_msg = "pulling image " + new_image + " from registry"
        tagging_msg = "tagging image " + new_image + " as '" + UNIQUE_ID
        assert pulling_msg in caplog.text
        assert tagging_msg in caplog.text
Example #6
def get_output_metadata(path, filename):
    """
    Describe a file by its metadata.

    :return: dict
    """
    checksums = get_checksums(path, ['md5'])
    metadata = {'filename': filename,
                'filesize': os.path.getsize(path),
                'checksum': checksums['md5sum'],
                'checksum_type': 'md5'}

    return metadata
    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        return metadata
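A hedged usage sketch for get_output_metadata; the path and filename below are made up, and the checksum naturally depends on the file's contents:

# Hypothetical call; '/tmp/build.log' is an assumed path.
metadata = get_output_metadata('/tmp/build.log', 'build.log')
# Expected shape:
# {'filename': 'build.log', 'filesize': <size in bytes>,
#  'checksum': '<md5 hexdigest of the file>', 'checksum_type': 'md5'}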
    def test_manifest_list_doesnt_have_current_platform(self, caplog):
        manifest_list = {
            'manifests': [
                {'platform': {'architecture': 'ppc64le'}, 'digest': 'sha256:654321'},
            ]
        }
        manifest_list_digest = get_checksums(BytesIO(json.dumps(manifest_list).encode('utf-8')),
                                             ['sha256'])['sha256sum']

        def workflow_callback(workflow):
            workflow = self.prepare(workflow)
            workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(['ppc64le'])
            release = 'rel1'
            version = 'ver1'
            config_blob = {'config': {'Labels': {'release': release, 'version': version}}}
            (flexmock(atomic_reactor.util)
             .should_receive('get_config_from_registry')
             .and_return(config_blob)
             .times(0))

            manifest_tag = 'registry.example.com' + '/' + BASE_IMAGE_W_SHA
            base_image_result = ImageName.parse(manifest_tag)
            manifest_image = base_image_result.copy()

            (flexmock(atomic_reactor.util)
             .should_receive('get_manifest_list')
             .with_args(image=manifest_image, registry=manifest_image.registry, insecure=True,
                        dockercfg_path=None)
             .and_return(flexmock(json=lambda: manifest_list,
                                  content=json.dumps(manifest_list).encode('utf-8')))
             .once())
            return workflow

        test_pull_base_image_plugin(LOCALHOST_REGISTRY, BASE_IMAGE_W_SHA,
                                    [], [], reactor_config_map=True,
                                    inspect_only=False,
                                    workflow_callback=workflow_callback,
                                    check_platforms=True)
        new_image = "'registry.example.com/busybox@sha256:{}'".format(manifest_list_digest)
        pulling_msg = "pulling image " + new_image + " from registry"
        tagging_msg = "tagging image " + new_image + " as '" + UNIQUE_ID
        assert pulling_msg in caplog.text
        assert tagging_msg in caplog.text
def test_pull_base_autorebuild(monkeypatch, inspect_only, user_params):  # noqa
    mock_manifest_list = json.dumps({}).encode('utf-8')
    new_base_image = ImageName.parse(BASE_IMAGE)
    new_base_image.tag = 'newtag'
    new_base_image.registry = LOCALHOST_REGISTRY
    dgst = 'sha256:{}'.format(
        get_checksums(BytesIO(mock_manifest_list), ['sha256'])['sha256sum'])
    expected_digests = {
        BASE_IMAGE_W_REGISTRY: {
            MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST: dgst
        }
    }

    monkeypatch.setenv(
        "BUILD",
        json.dumps({
            'metadata': {
                'name': UNIQUE_ID,
            },
            'spec': {
                'triggeredBy': [
                    {
                        'imageChangeBuild': {
                            'imageID': new_base_image.to_str()
                        }
                    },
                ]
            },
        }))

    (flexmock(atomic_reactor.util.RegistryClient).should_receive(
        'get_manifest_list').and_return(flexmock(content=mock_manifest_list)))

    test_pull_base_image_plugin(user_params,
                                LOCALHOST_REGISTRY,
                                BASE_IMAGE, [new_base_image.to_str()],
                                [BASE_IMAGE_W_REGISTRY],
                                inspect_only=inspect_only,
                                check_platforms=True,
                                expected_digests=expected_digests)
    def test_parent_images_digests_orchestrator(self, caplog, user_params,
                                                fail):
        """Testing processing of parent_images_digests at an orchestrator"""

        reg_image_no_tag = '{}/{}'.format(SOURCE_REGISTRY,
                                          BASE_IMAGE_NAME.to_str(tag=False))

        test_vals = {'workflow': None, 'expected_digest': {}}
        if not fail:
            manifest_list = {
                'manifests': [
                    {
                        'platform': {
                            'architecture': 'amd64'
                        },
                        'digest': 'sha256:123456'
                    },
                    {
                        'platform': {
                            'architecture': 'ppc64le'
                        },
                        'digest': 'sha256:654321'
                    },
                ]
            }
            manifest_list_digest = get_checksums(
                BytesIO(json.dumps(manifest_list).encode('utf-8')),
                ['sha256'])['sha256sum']
            digest = 'sha256:{}'.format(manifest_list_digest)
            test_vals['expected_digest'] = {
                '{}/{}'.format(SOURCE_REGISTRY, BASE_IMAGE): {
                    MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST: digest
                }
            }

        def workflow_callback(workflow):
            workflow = self.prepare(workflow, mock_get_manifest_list=not fail)
            if fail:
                # fail to provide x86_64 platform specific digest
                manifest_list = {'manifests': []}

                (flexmock(atomic_reactor.util.RegistryClient).
                 should_receive('get_manifest_list').and_return(
                     flexmock(
                         json=lambda: manifest_list,
                         content=json.dumps(manifest_list).encode('utf-8'))))

                # platform validation will fail if manifest is missing
                # setting only one platform to skip platform validation and test negative case
                workflow.buildstep_plugins_conf[0]['args']['platforms'] = [
                    'x86_64'
                ]
                workflow.prebuild_results[
                    PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = {'x86_64'}

            test_vals['workflow'] = workflow
            return workflow

        if fail:
            with pytest.raises(PluginFailedException) as exc:
                test_pull_base_image_plugin(
                    user_params,
                    SOURCE_REGISTRY,
                    BASE_IMAGE,
                    [],
                    [],
                    inspect_only=False,
                    workflow_callback=workflow_callback,
                    check_platforms=True,  # orchestrator
                )
            assert 'not available for arches' in str(exc.value)
        else:
            test_pull_base_image_plugin(
                user_params,
                SOURCE_REGISTRY,
                BASE_IMAGE,
                [],
                [],
                inspect_only=False,
                workflow_callback=workflow_callback,
                check_platforms=True,  # orchestrator
            )

            replacing_msg = (
                "Replacing image '{}/{}' with '{}@sha256:{}'".format(
                    SOURCE_REGISTRY, BASE_IMAGE, reg_image_no_tag,
                    manifest_list_digest))
            assert replacing_msg in caplog.text

            # check if worker.builder has set correct values
            builder_digests_dict = test_vals[
                'workflow'].builder.parent_images_digests
            assert builder_digests_dict == test_vals['expected_digest']
Example #11
def mock_koji_manifest_download(tmpdir,
                                requests_mock,
                                retries=0,
                                dirs_in_remote=('app', 'deps'),
                                files_in_remote=(),
                                cachito_package_names=None,
                                change_package_names=True):
    class MockBytesIO(io.BytesIO):
        reads = 0

        def read(self, *args, **kwargs):
            if MockBytesIO.reads < retries:
                MockBytesIO.reads += 1
                raise requests.exceptions.ConnectionError

            return super(MockBytesIO, self).read(*args, **kwargs)

    flexmock(time).should_receive('sleep')
    sign_keys = ['', 'usedKey', 'notUsed']
    bad_keys = ['notUsed']
    urls = [get_srpm_url(k) for k in sign_keys]

    for url in urls:
        if any(k in url for k in bad_keys):
            requests_mock.register_uri('HEAD',
                                       url,
                                       text='Not Found',
                                       status_code=404)
        else:
            requests_mock.register_uri('HEAD', url, content=b'')

            def body_callback(request, context):
                f = MockBytesIO(b"Source RPM")
                return f

            requests_mock.register_uri('GET', url, body=body_callback)

    def body_remote_callback(request, context):
        f = MockBytesIO(targz_bytes)
        return f

    if 'app' not in dirs_in_remote:
        os.mkdir(os.path.join(str(tmpdir), 'app'))
    if 'deps' not in dirs_in_remote:
        os.mkdir(os.path.join(str(tmpdir), 'deps'))

    for dir_name in dirs_in_remote:
        os.mkdir(os.path.join(str(tmpdir), dir_name))

    for file_name in files_in_remote:
        open(os.path.join(str(tmpdir), file_name), 'w').close()

    with tarfile.open(os.path.join(str(tmpdir), 'test.tar.gz'), "w:gz") as tar:
        tar.add(os.path.join(str(tmpdir), 'app'), arcname='app')
        tar.add(os.path.join(str(tmpdir), 'deps'), arcname='deps')

    shutil.rmtree(os.path.join(str(tmpdir), 'app'))
    shutil.rmtree(os.path.join(str(tmpdir), 'deps'))

    with open(os.path.join(str(tmpdir), 'test.tar.gz'), 'rb') as tar_file:
        targz_bytes = tar_file.read()
    targz_checksum = get_checksums(os.path.join(str(tmpdir), 'test.tar.gz'),
                                   ['md5']).get('md5sum')
    KOJIFILE_MEAD_SOURCE_ARCHIVE['checksum'] = targz_checksum
    REMOTE_SOURCE_FILE_ARCHIVE['checksum'] = targz_checksum

    os.unlink(os.path.join(str(tmpdir), 'test.tar.gz'))

    def body_remote_json_callback(request, context):
        remote_json = {'packages': []}
        if cachito_package_names:
            for pkg in cachito_package_names:
                if change_package_names:
                    remote_json['packages'].append(
                        {'name': os.path.join('github.com', pkg)})
                else:
                    remote_json['packages'].append({'name': pkg})
        remote_cont = json.dumps(remote_json)

        remote_bytes = bytes(remote_cont, 'ascii')
        f = io.BytesIO(remote_bytes)
        return f

    requests_mock.register_uri('GET',
                               get_remote_url(KOJI_BUILD),
                               body=body_remote_callback)
    requests_mock.register_uri('GET',
                               get_remote_url(KOJI_PARENT_BUILD),
                               body=body_remote_callback)

    requests_mock.register_uri('GET',
                               get_remote_url(KOJI_BUILD,
                                              file_name=REMOTE_SOURCES_JSON),
                               body=body_remote_json_callback)
    requests_mock.register_uri('GET',
                               get_remote_url(KOJI_PARENT_BUILD,
                                              file_name=REMOTE_SOURCES_JSON),
                               body=body_remote_json_callback)
    requests_mock.register_uri('GET',
                               get_kojifile_pnc_source_url(),
                               body=body_remote_callback)
    requests_mock.register_uri(
        'HEAD',
        get_kojifile_pnc_source_url(),
        body='',
        headers={
            'Content-disposition':
            'inline; filename="{}"'.format(KOJIFILE_PNC_SOURCE_FILENAME)
        })
    requests_mock.register_uri(
        'GET',
        get_pnc_api_url(KOJI_PNC_BUILD['extra']['external_build_id']),
        headers={'Location': get_kojifile_pnc_source_url()},
        body=body_remote_callback,
        status_code=302)
    requests_mock.register_uri('GET',
                               get_kojifile_source_mead_url(
                                   KOJI_MEAD_BUILD,
                                   KOJIFILE_MEAD_SOURCE_ARCHIVE),
                               body=body_remote_callback)
    requests_mock.register_uri('GET',
                               get_remote_file_url(KOJI_BUILD),
                               body=body_remote_callback)
    requests_mock.register_uri('GET',
                               get_remote_file_url(KOJI_PARENT_BUILD),
                               body=body_remote_callback)
    def test_parent_images_digests_orchestrator(self, caplog, fail):
        """Testing processing of parent_images_digests at an orchestrator"""

        reg_image_no_tag = 'registry.example.com/{}'.format(BASE_IMAGE_NAME.to_str(tag=False))

        test_vals = {
            'workflow': None,
            'expected_digest': {}
        }
        if not fail:
            manifest_list = {
                'manifests': [
                    {'platform': {'architecture': 'amd64'}, 'digest': 'sha256:123456'},
                    {'platform': {'architecture': 'ppc64le'}, 'digest': 'sha256:654321'},
                ]
            }
            manifest_list_digest = get_checksums(BytesIO(json.dumps(manifest_list).encode('utf-8')),
                                                 ['sha256'])['sha256sum']
            digest = 'sha256:{}'.format(manifest_list_digest)
            test_vals['expected_digest'] = {
                'registry.example.com/{}'.format(BASE_IMAGE): {
                    'application/vnd.docker.distribution.manifest.list.v2+json': digest
                }
            }

        def workflow_callback(workflow):
            workflow = self.prepare(workflow)
            if fail:
                # fail to provide x86_64 platform specific digest
                manifest_list = {
                    'manifests': []
                }

                (flexmock(atomic_reactor.util)
                 .should_receive('get_manifest_list')
                 .and_return(flexmock(json=lambda: manifest_list,
                                      content=json.dumps(manifest_list).encode('utf-8')))
                 )

                # platform validation will fail if manifest is missing
                # setting only one platform to skip platform validation and test negative case
                workflow.buildstep_plugins_conf[0]['args']['platforms'] = ['x86_64']
                workflow.prebuild_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(['x86_64'])

            test_vals['workflow'] = workflow
            return workflow

        if fail:
            with pytest.raises(PluginFailedException) as exc:
                test_pull_base_image_plugin(LOCALHOST_REGISTRY, BASE_IMAGE,
                                            [], [], reactor_config_map=True,
                                            inspect_only=False,
                                            workflow_callback=workflow_callback,
                                            check_platforms=True,  # orchestrator
                                            )
            assert 'Missing arches in manifest list for base image' in str(exc.value)
        else:
            test_pull_base_image_plugin(LOCALHOST_REGISTRY, BASE_IMAGE,
                                        [], [], reactor_config_map=True,
                                        inspect_only=False,
                                        workflow_callback=workflow_callback,
                                        check_platforms=True,  # orchestrator
                                        )

            # replacing_msg = ("Replacing image 'registry.example.com/{}'".format(BASE_IMAGE))
            replacing_msg = ("Replacing image 'registry.example.com/{}' with '{}@sha256:{}'"
                             .format(BASE_IMAGE, reg_image_no_tag, manifest_list_digest))
            assert replacing_msg in caplog.text

            # check if worker.builder has set correct values
            builder_digests_dict = test_vals['workflow'].builder.parent_images_digests
            assert builder_digests_dict == test_vals['expected_digest']