コード例 #1
0
    def workflow(self, push=True, sync=True, build_process_failed=False,
                 postbuild_results=None, prebuild_results=None, expectv2schema2=False,
                 platform_descriptors=False):
        """Build a mocked workflow object for the plugin under test.

        :param push: register a pulp registry without server-side sync
        :param sync: register a pulp registry with server-side sync
        :param build_process_failed: value exposed on the mocked workflow
        :param postbuild_results: plugin results dict (defaults to empty)
        :param prebuild_results: plugin results dict (defaults to empty)
        :param expectv2schema2: when True, schema1 digests are not preferred
        :param platform_descriptors: when True, add an x86_64 descriptor
        :return: a flexmock standing in for the workflow
        """
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        push_conf = PushConf()
        # One pulp registry per requested mode, in the same order as before:
        # plain push first (no server-side sync), then the syncing one.
        for server_side_sync, wanted in ((False, push), (True, sync)):
            if wanted:
                push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                            server_side_sync=server_side_sync)

        reactor_conf = {
            ReactorConfigKeys.VERSION_KEY: 1,
            'prefer_schema1_digest': not expectv2schema2,
        }
        if platform_descriptors:
            reactor_conf['platform_descriptors'] = [
                {'platform': 'x86_64', 'architecture': 'amd64'},
            ]

        workspace = {
            ReactorConfigPlugin.key: {
                WORKSPACE_CONF_KEY: ReactorConfig(reactor_conf),
            },
        }

        mock_get_retry_session()

        builder = flexmock()
        builder.image_id = 'sha256:(old)'
        return flexmock(tag_conf=tag_conf,
                        push_conf=push_conf,
                        builder=builder,
                        build_process_failed=build_process_failed,
                        plugin_workspace=workspace,
                        postbuild_results=postbuild_results or {},
                        prebuild_results=prebuild_results or {})
コード例 #2
0
    def workflow(self, push=True, sync=True, build_process_failed=False,
                 postbuild_results=None, prebuild_results=None, expectv2schema2=False,
                 platform_descriptors=False):
        """Assemble a fake workflow object used to drive the plugin tests.

        :param push: add a pulp registry with server_side_sync disabled
        :param sync: add a pulp registry with server_side_sync enabled
        :param build_process_failed: exposed as-is on the returned mock
        :param postbuild_results: post-build plugin results (default empty)
        :param prebuild_results: pre-build plugin results (default empty)
        :param expectv2schema2: disables schema1 digest preference when True
        :param platform_descriptors: include an x86_64/amd64 descriptor
        :return: flexmock emulating the workflow object
        """
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        push_conf = PushConf()
        if push:
            push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                        server_side_sync=False)
        if sync:
            push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                        server_side_sync=True)

        reactor_conf = {
            ReactorConfigKeys.VERSION_KEY: 1,
            'prefer_schema1_digest': not expectv2schema2,
        }
        if platform_descriptors:
            reactor_conf['platform_descriptors'] = [
                {'platform': 'x86_64', 'architecture': 'amd64'},
            ]

        mock_get_retry_session()

        # flexmock(**kwargs) sets the attributes directly on the mock.
        fake_builder = flexmock(image_id='sha256:(old)')
        return flexmock(
            tag_conf=tag_conf,
            push_conf=push_conf,
            builder=fake_builder,
            build_process_failed=build_process_failed,
            plugin_workspace={
                ReactorConfigPlugin.key: {
                    WORKSPACE_CONF_KEY: ReactorConfig(reactor_conf),
                },
            },
            postbuild_results=postbuild_results or {},
            prebuild_results=prebuild_results or {},
        )
コード例 #3
0
    def test_get_unique_images_with_platform(self):
        """The platform name is appended as a suffix to each unique image."""
        platform = 'x86_64'
        base_image = 'registry.com/org/hello:world-16111-20220103213046'

        conf = TagConf()
        conf.add_unique_image(base_image)

        assert conf.get_unique_images_with_platform(platform) == [
            ImageName.parse(f'{base_image}-{platform}'),
        ]
コード例 #4
0
 def workflow(self):
     """Return a minimal mocked workflow: one unique image, one pulp registry."""
     tag_conf = TagConf()
     tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

     push_conf = PushConf()
     push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI)

     builder = flexmock()
     builder.image_id = 'sha256:(old)'

     return flexmock(tag_conf=tag_conf, push_conf=push_conf,
                     builder=builder, plugin_workspace={})
コード例 #5
0
 def test_dump_empty_object(self):
     """A freshly created TagConf serializes to empty image lists."""
     dumped = TagConf().as_dict()
     assert dumped == {
         'primary_images': [],
         'unique_images': [],
         'floating_images': [],
     }
コード例 #6
0
def mock_environment(tmpdir, primary_images=None, worker_annotations=None):
    """Build a mocked (tasker, workflow) pair for orchestrator-build tests.

    :param tmpdir: pytest temporary directory (kept for fixture signature
        compatibility; not used inside this helper)
    :param primary_images: optional list of primary image tags; the first
        entry is also registered as the unique image
    :param worker_annotations: optional mapping of platform name to worker
        build annotations; defaults to a single ppc64le worker
    :return: tuple of (tasker, workflow)
    """
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())
    if primary_images:
        workflow.tag_conf.add_primary_images(primary_images)
        workflow.tag_conf.add_unique_image(primary_images[0])

    annotations = deepcopy(BUILD_ANNOTATIONS)
    # Fix: the default used to be a mutable ``{}`` argument (shared across
    # calls); use None and fall back here instead — identical behavior for
    # all callers, since the falsy check covered ``{}`` already.
    if not worker_annotations:
        worker_annotations = {'ppc64le': PPC_ANNOTATIONS}
    for worker in worker_annotations:
        # Deep-copy so tests mutating annotations cannot leak into the
        # caller-supplied fixtures.
        annotations['worker-builds'][worker] = deepcopy(
            worker_annotations[worker])

    workflow.build_result = BuildResult(image_id='123456',
                                        annotations=annotations)

    return tasker, workflow
コード例 #7
0
def mock_environment(tmpdir, primary_images=None,
                     annotations=None):
    """Create a mocked (tasker, workflow) pair with populated tag config.

    :param tmpdir: pytest temporary directory (signature compatibility)
    :param primary_images: optional primary tags; only tags containing a
        '-' are registered as primary, the first entry becomes the unique
        image
    :param annotations: optional build-result annotations (default empty)
    :return: tuple of (tasker, workflow)
    """
    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(source=SOURCE)
    parent_id = '123456parent-id'
    workflow._base_image_inspect = {'Id': parent_id}
    workflow.builder = StubInsideBuilder()
    workflow.builder.image_id = '123456imageid'
    workflow.builder.base_image = ImageName(repo='Fedora', tag='22')
    workflow.builder.source = StubInsideBuilder()
    workflow.builder.built_image_info = {'ParentId': parent_id}
    workflow.builder.source.dockerfile_path = None
    workflow.builder.source.path = None
    workflow.tag_conf = TagConf()
    if primary_images:
        for image in primary_images:
            # Presumably tags with a '-' look like "<version>-<release>";
            # only those are registered as primary tags.
            if '-' in ImageName.parse(image).tag:
                workflow.tag_conf.add_primary_image(image)
        workflow.tag_conf.add_unique_image(primary_images[0])

    workflow.tag_conf.add_floating_image('namespace/httpd:floating')
    workflow.build_result = BuildResult(image_id='123456', annotations=annotations or {})

    return tasker, workflow
コード例 #8
0
    def workflow(self, push=True, sync=True, build_process_failed=False):
        """Return a mocked workflow with configurable pulp registries.

        :param push: register a pulp registry without server-side sync
        :param sync: register a pulp registry with server-side sync
        :param build_process_failed: exposed on the returned mock
        :return: flexmock standing in for the workflow
        """
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        push_conf = PushConf()
        if push:
            push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                        server_side_sync=False)
        if sync:
            push_conf.add_pulp_registry('pulp', crane_uri=self.CRANE_URI,
                                        server_side_sync=True)

        mock_get_retry_session()

        builder = flexmock()
        builder.image_id = 'sha256:(old)'
        return flexmock(tag_conf=tag_conf, push_conf=push_conf, builder=builder,
                        build_process_failed=build_process_failed,
                        plugin_workspace={})
コード例 #9
0
 def test_as_dict(self):
     """as_dict() reflects every image added to the configuration."""
     conf = TagConf()
     conf.add_primary_image('r.fp.o/f:35')
     for floating in ('ns/img:latest', 'ns1/img2:devel'):
         conf.add_floating_image(floating)

     assert conf.as_dict() == {
         'primary_images': [ImageName.parse('r.fp.o/f:35')],
         'unique_images': [],
         'floating_images': [
             ImageName.parse('ns/img:latest'),
             ImageName.parse('ns1/img2:devel'),
         ],
     }
コード例 #10
0
    def test_verify_fail_no_image(self):
        """The plugin must raise when no unique image has been set."""
        workflow = self.workflow()
        workflow.tag_conf = TagConf()

        plugin = VerifyMediaTypesPlugin(MockerTasker(), workflow)
        with pytest.raises(ValueError) as exc_info:
            plugin.run()
        assert "no unique image set, impossible to verify media types" in str(exc_info.value)
コード例 #11
0
def mock_environment(workflow, unique_image, primary_images=None):
    """Reset the workflow's tag configuration and repopulate it for tests.

    :param workflow: workflow whose data.tag_conf is replaced
    :param unique_image: tag registered as the unique image
    :param primary_images: optional tags; only those whose tag part
        contains a '-' are registered as primary
    """
    data = workflow.data
    setattr(data, 'tag_conf', TagConf())

    for image in primary_images or []:
        # Presumably '-' marks a "<version>-<release>" style tag — only
        # those qualify as primary tags.
        if '-' in ImageName.parse(image).tag:
            data.tag_conf.add_primary_image(image)

    data.tag_conf.add_unique_image(unique_image)
    data.tag_conf.add_floating_image('namespace/httpd:floating')
コード例 #12
0
    def test_verify_fail_no_image(self):
        """Without any unique image the plugin cannot run and must raise."""
        workflow = self.workflow()
        workflow.data.tag_conf = TagConf()

        with pytest.raises(ValueError) as exc_info:
            VerifyMediaTypesPlugin(workflow).run()

        expected = "no unique image set, impossible to verify media types"
        assert expected in str(exc_info.value)
コード例 #13
0
    def test_save_and_load(self, tmpdir):
        """Test save workflow data and then load them back properly.

        Builds a workflow data object containing nested project types
        (ImageName keys/values inside dicts and lists), saves it to a
        context directory, validates the dump against the JSON schema,
        then reloads it and compares the round-tripped fields.
        """
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test object in dict values is serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                    # Such object in a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        # Save into a fresh context directory under tmpdir.
        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load and verify the loaded data
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        # Only the round-tripped fields are compared; koji_upload_files is
        # not asserted here — presumably not persisted, TODO confirm.
        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
コード例 #14
0
    def test_load_from_dump(self):
        """Workflow data loaded from a dumped dict round-trips each section."""
        input_data = {
            "dockerfile_images": {
                "original_parents": ["scratch"],
                "local_parents": [],
                "source_registry": None,
                "organization": None,
            },
            "plugins_results": {"plugin_1": "result"},
            "tag_conf": {
                "floating_images": [
                    ImageName.parse("registry/httpd:2.4").to_str(),
                ],
            },
        }

        wf_data = ImageBuildWorkflowData.load(input_data)

        assert wf_data.dockerfile_images == DockerfileImages.load(
            input_data["dockerfile_images"])
        assert wf_data.plugins_results == input_data["plugins_results"]
        assert wf_data.tag_conf == TagConf.load(input_data["tag_conf"])
コード例 #15
0
 def test_parse_images(self, input_data, expected_primary_images,
                       expected_unique_images, expected_floating_images):
     """TagConf.load() parses every image category from the input data."""
     conf = TagConf.load(input_data)

     assert conf.primary_images == expected_primary_images
     assert conf.unique_images == expected_unique_images
     assert conf.floating_images == expected_floating_images
コード例 #16
0
def mock_environment(tmpdir,
                     session=None,
                     name=None,
                     component=None,
                     version=None,
                     release=None,
                     source=None,
                     build_process_failed=False,
                     docker_registry=True,
                     pulp_registries=0,
                     blocksize=None,
                     task_states=None,
                     additional_tags=None,
                     has_config=None,
                     prefer_schema1_digest=True):
    """Prepare a mocked (tasker, workflow) pair for koji upload tests.

    :param tmpdir: directory in which a fake Dockerfile and exported image
        archive are created
    :param session: koji client session; a MockedClientSession is created
        when omitted
    :param name: image name; with version/release it controls which
        unique/primary tags are registered
    :param component: value for the component Dockerfile labels
    :param version: value for the version Dockerfile labels
    :param release: value for the release Dockerfile labels
    :param source: build source; defaults to a git source
    :param build_process_failed: when True the build result is a failure
    :param docker_registry: register a docker registry with fake digests
    :param pulp_registries: number of pulp registries to register
    :param blocksize: accepted for signature compatibility; not used here
    :param task_states: forwarded to the default MockedClientSession
    :param additional_tags: extra primary tags to add for ``name``
    :param has_config: when truthy, attach a fake registry config blob
    :param prefer_schema1_digest: with pulp registries, makes the v1 digest
        the meaningful one instead of v2
    :return: tuple of (tasker, workflow)
    """
    if session is None:
        # Fix: 'task_states' used to be passed as the literal None here,
        # silently ignoring the caller-supplied argument.
        session = MockedClientSession('', task_states=task_states)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.image_id = '123456imageid'
    workflow.builder.set_inspection_data({'Id': base_image_id})
    setattr(workflow, 'tag_conf', TagConf())
    # Write a fake Dockerfile with the component/version/release labels.
    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write(
            'FROM base\n'
            'LABEL BZComponent={component} com.redhat.component={component}\n'
            'LABEL Version={version} version={version}\n'
            'LABEL Release={release} release={release}\n'.format(
                component=component, version=version, release=release))
        workflow.builder.set_df_path(df.name)
    if name and version:
        workflow.tag_conf.add_unique_image(
            'user/test-image:{v}-timestamp'.format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images([
            "{0}:{1}-{2}".format(name, version, release),
            "{0}:{1}".format(name, version), "{0}:latest".format(name)
        ])

    if additional_tags:
        workflow.tag_conf.add_primary_images(
            ["{0}:{1}".format(name, tag) for tag in additional_tags])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub, opts: session)
    flexmock(GitSource)
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'push_conf', PushConf())
    if docker_registry:
        docker_reg = workflow.push_conf.add_docker_registry(
            'docker.example.com')

        for image in workflow.tag_conf.images:
            tag = image.to_str(registry=False)

            # With pulp registries and schema1 preference, the v1 digest is
            # the meaningful one; otherwise the v2 digest carries the value.
            if pulp_registries and prefer_schema1_digest:
                docker_reg.digests[tag] = ManifestDigest(v1=fake_digest(image),
                                                         v2='sha256:not-used')
            else:
                docker_reg.digests[tag] = ManifestDigest(v1='sha256:not-used',
                                                         v2=fake_digest(image))

            if has_config:
                docker_reg.config = {
                    'config': {
                        'architecture': LOCAL_ARCH
                    },
                    'container_config': {}
                }

    for _ in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    # Fake exported image archive (4 KiB of filler).
    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
        setattr(workflow, 'exported_image_sequence',
                [{
                    'path': fp.name,
                    'type': IMAGE_TYPE_DOCKER_ARCHIVE
                }])

    if build_process_failed:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            fail_reason="not built")
    else:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            image_id="id1234")
    workflow.prebuild_plugins_conf = {}

    # NOTE(review): the second rpm entry lacks ';' between "24000" and
    # "RSA/..." unlike the first — looks deliberate (malformed-input case),
    # confirm before changing.
    workflow.image_components = parse_rpm_output([
        "name1;1.0;1;" + LOCAL_ARCH + ";0;2000;" + FAKE_SIGMD5.decode() +
        ";23000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
        "name2;2.0;1;" + LOCAL_ARCH + ";0;3000;" + FAKE_SIGMD5.decode() +
        ";24000"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
    ])

    return tasker, workflow
コード例 #17
0
    def workflow(self, build_process_failed=False, registries=None, registry_types=None,
                 platforms=None, platform_descriptors=None, group=True, no_amd64=False,
                 fail=False):
        """Build a mocked workflow and stub out all registry HTTP traffic.

        :param build_process_failed: exposed on the returned mock
        :param registries: registry definitions; defaults to a single v2
            registry expecting ``registry_types``
        :param registry_types: expected media types for the default registry
        :param platforms: platforms list; defaults to those from the
            platform descriptors
        :param platform_descriptors: reactor-config platform descriptors;
            defaults to x86_64/ppc64le/s390x
        :param group: when True, group-manifests results contain a digest
        :param no_amd64: respond only with a manifest list
        :param fail: "bad_results" makes registries respond with v1 only
        :return: flexmock standing in for the workflow
        """
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        push_conf = PushConf()

        if platform_descriptors is None:
            platform_descriptors = [
                {'platform': 'x86_64', 'architecture': 'amd64'},
                {'platform': 'ppc64le', 'architecture': 'ppc64le'},
                {'platform': 's390x', 'architecture': 's390x'},
            ]

        if platforms is None:
            platforms = [descriptor['platform'] for descriptor in platform_descriptors]

        if registries is None and registry_types is None:
            registry_types = [MEDIA_TYPE_DOCKER_V1, MEDIA_TYPE_DOCKER_V2_SCHEMA1,
                              MEDIA_TYPE_DOCKER_V2_SCHEMA2, MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST]

        if registries is None:
            registries = [{
                'url': 'https://container-registry.example.com/v2',
                'version': 'v2',
                'insecure': True,
                'expected_media_types': registry_types
            }]
        conf = {
            ReactorConfigKeys.VERSION_KEY: 1,
            'registries': registries,
        }
        if platform_descriptors:
            conf['platform_descriptors'] = platform_descriptors

        plugin_workspace = {
            ReactorConfigPlugin.key: {
                WORKSPACE_CONF_KEY: ReactorConfig(conf)
            }
        }

        flexmock(HTTPRegistryAuth).should_receive('__new__').and_return(None)
        mock_auth = None
        for registry in registries:
            def get_manifest(request):
                # Echo back the first media type the client asked for.
                media_types = request.headers.get('Accept', '').split(',')
                content_type = media_types[0]

                return (200, {'Content-Type': content_type}, '{}')

            # Fix: the pattern used to be built as "r'" + url + "...'" with
            # the raw-string marker embedded in the pattern *contents*, so
            # the compiled regex began with the literal characters r' and
            # could never match the registry URL.
            url = re.compile(registry['url'] + ".*/manifests/.*")
            responses.add_callback(responses.GET, url, callback=get_manifest)

            expected_types = registry.get('expected_media_types', [])
            if fail == "bad_results":
                response_types = [MEDIA_TYPE_DOCKER_V1]
            elif no_amd64:
                response_types = [MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST]
            else:
                response_types = expected_types

            reguri = RegistryURI(registry['url']).docker_uri
            if re.match('http(s)?://', reguri):
                urlbase = reguri
            else:
                urlbase = 'https://{0}'.format(reguri)

            actual_v2_url = urlbase + "/v2/foo/manifests/unique-tag"
            actual_v1_url = urlbase + "/v1/repositories/foo/tags/unique-tag"

            # Pick a canned response per media type the registry claims to
            # serve; absent types answer with the "none" response.
            v1_response = self.config_response_none
            v2_response = self.config_response_none
            v2_list_response = self.config_response_none
            if MEDIA_TYPE_DOCKER_V2_SCHEMA1 in response_types:
                v1_response = self.config_response_config_v1
            if MEDIA_TYPE_DOCKER_V2_SCHEMA2 in response_types:
                v2_response = self.config_response_config_v2
            if MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST in response_types:
                v2_list_response = self.config_response_config_v2_list
            v2_header_v1 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA1}
            v2_header_v2 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA2}
            manifest_header = {'Accept': MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST}

            (flexmock(requests.Session)
                .should_receive('get')
                .with_args(actual_v2_url, headers=v2_header_v1,
                           auth=mock_auth, verify=False)
                .and_return(v1_response))
            (flexmock(requests.Session)
                .should_receive('get')
                .with_args(actual_v2_url, headers=v2_header_v2,
                           auth=mock_auth, verify=False)
                .and_return(v2_response))
            (flexmock(requests.Session)
                .should_receive('get')
                .with_args(actual_v2_url, headers={'Accept': MEDIA_TYPE_OCI_V1},
                           auth=mock_auth, verify=False)
                .and_return(self.config_response_none))
            (flexmock(requests.Session)
                .should_receive('get')
                .with_args(actual_v2_url, headers={'Accept': MEDIA_TYPE_OCI_V1_INDEX},
                           auth=mock_auth, verify=False)
                .and_return(self.config_response_none))
            (flexmock(requests.Session)
                .should_receive('get')
                .with_args(actual_v2_url, headers=manifest_header,
                           auth=mock_auth, verify=False)
                .and_return(v2_list_response))

            if MEDIA_TYPE_DOCKER_V1 in response_types:
                (flexmock(requests.Session)
                    .should_receive('get')
                    .with_args(actual_v1_url, headers={'Accept': MEDIA_TYPE_DOCKER_V1},
                               auth=mock_auth, verify=False)
                    .and_return(self.config_response_v1))

        digests = {'digest': None} if group else {}
        prebuild_results = {PLUGIN_CHECK_AND_SET_PLATFORMS_KEY: platforms}
        postbuild_results = {PLUGIN_GROUP_MANIFESTS_KEY: digests}

        mock_get_retry_session()
        builder = flexmock()
        setattr(builder, 'image_id', 'sha256:(old)')
        return flexmock(tag_conf=tag_conf,
                        push_conf=push_conf,
                        builder=builder,
                        build_process_failed=build_process_failed,
                        plugin_workspace=plugin_workspace,
                        prebuild_results=prebuild_results,
                        postbuild_results=postbuild_results)
コード例 #18
0
def get_workflow_data_json():
    """Build a representative workflow-data fixture and return its JSON form.

    Constructs an ImageBuildWorkflowData object containing nested project
    types (ImageName values inside dicts and lists), attaches per-platform
    rpm component lists, serializes it with WorkflowDataEncoder, and loads
    the resulting file back into a plain dict.

    :return: dict parsed from the JSON dump of the workflow data
    """
    tag_conf = TagConf()
    tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
    tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

    wf_data = ImageBuildWorkflowData(
        dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
        # Test object in dict values is serialized
        plugins_results={
            "image_build": {
                "logs": ["Build succeeds."]
            },
            "tag_and_push": [
                # Such object in a list should be handled properly.
                ImageName(registry="localhost:5000",
                          repo='image',
                          tag='latest'),
            ],
            "plugin_a": {
                'parent-images-koji-builds': {
                    ImageName(repo='base', tag='latest').to_str(): {
                        'id': 123456789,
                        'nvr': 'base-image-1.0-99',
                        'state': 1,
                    },
                },
            },
        },
        tag_conf=tag_conf,
        koji_upload_files=[
            {
                "local_filename": "/path/to/build1.log",
                "dest_filename": "x86_64-build.log",
            },
            {
                "local_filename": "/path/to/dir1/remote-source.tar.gz",
                "dest_filename": "remote-source.tar.gz",
            },
        ])

    # Identical component lists for both platforms; only the arch key differs.
    wf_data.image_components = {
        'x86_64': [{
            'type': 'rpm',
            'name': 'python-docker-py',
            'version': '1.3.1',
            'release': '1.fc24',
            'arch': 'noarch',
            'sigmd5': '7c1f60d8cde73e97a45e0c489f4a3b26',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'fedora-repos-rawhide',
            'version': '24',
            'release': '0.1',
            'arch': 'noarch',
            'sigmd5': 'd41df1e059544d906363605d47477e60',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'gpg-pubkey-doc',
            'version': '1.0',
            'release': '1',
            'arch': 'noarch',
            'sigmd5': '00000000000000000000000000000000',
            'signature': None,
            'epoch': None
        }],
        'ppc64le': [{
            'type': 'rpm',
            'name': 'python-docker-py',
            'version': '1.3.1',
            'release': '1.fc24',
            'arch': 'noarch',
            'sigmd5': '7c1f60d8cde73e97a45e0c489f4a3b26',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'fedora-repos-rawhide',
            'version': '24',
            'release': '0.1',
            'arch': 'noarch',
            'sigmd5': 'd41df1e059544d906363605d47477e60',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'gpg-pubkey-doc',
            'version': '1.0',
            'release': '1',
            'arch': 'noarch',
            'sigmd5': '00000000000000000000000000000000',
            'signature': None,
            'epoch': None
        }],
    }

    # Round-trip through an actual file so the encoder path is exercised
    # exactly the way production code writes workflow data.
    with TemporaryDirectory() as d:
        with open(os.path.join(d, 'workflow_data.json'), 'w') as f:
            json.dump(wf_data.as_dict(), f, cls=WorkflowDataEncoder)
        with open(os.path.join(d, 'workflow_data.json')) as f:
            workflow_json = json.load(f)

    return workflow_json
コード例 #19
0
def mock_environment(tmpdir, session=None, name=None,
                     component=None, version=None, release=None,
                     source=None, build_process_failed=False,
                     is_rebuild=True, docker_registry=True,
                     pulp_registries=0, blocksize=None,
                     task_states=None, additional_tags=None,
                     has_config=None):
    """Create a mocked (tasker, workflow) pair for koji plugin tests.

    Builds a DockerBuildWorkflow with a fake builder, generated Dockerfile,
    tag/push configuration, exported image tarball and canned plugin
    results, and stubs out subprocess, koji and OSBS interactions.

    :param tmpdir: directory for the generated Dockerfile and image tarball
    :param session: koji client session; a MockedClientSession is created
                    when omitted
    :param name: image name used for primary/unique tags
    :param component: value for the BZComponent/com.redhat.component labels
    :param version: value for the Version/version labels
    :param release: value for the Release/release labels
    :param source: build source; defaults to a git source
    :param build_process_failed: if True, workflow.build_result is a failure
    :param is_rebuild: stored as the CheckAndSetRebuildPlugin result
    :param docker_registry: if True, add a docker registry with fake digests
    :param pulp_registries: number of pulp registries to register
    :param blocksize: accepted for call compatibility; unused here
    :param task_states: task state sequence for the mocked koji session
    :param additional_tags: extra tags added as primary images
    :param has_config: if True, attach a config blob to the docker registry
    :return: (tasker, workflow) tuple
    """
    if session is None:
        # Bug fix: pass the caller-supplied task_states through instead of
        # hard-coding None, so tests can drive the mocked task lifecycle.
        session = MockedClientSession('', task_states=task_states)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())
    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write('FROM base\n'
                 'LABEL BZComponent={component} com.redhat.component={component}\n'
                 'LABEL Version={version} version={version}\n'
                 'LABEL Release={release} release={release}\n'
                 .format(component=component, version=version, release=release))
        setattr(workflow.builder, 'df_path', df.name)
    if name and version:
        workflow.tag_conf.add_unique_image('user/test-image:{v}-timestamp'
                                           .format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images(["{0}:{1}-{2}".format(name,
                                                                   version,
                                                                   release),
                                              "{0}:{1}".format(name, version),
                                              "{0}:latest".format(name)])

    if additional_tags:
        workflow.tag_conf.add_primary_images(["{0}:{1}".format(name, tag)
                                              for tag in additional_tags])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub, opts: session)
    flexmock(GitSource)
    (flexmock(OSBS)
        .should_receive('get_build_logs')
        .with_args(BUILD_ID)
        .and_return('build logs - \u2018 \u2017 \u2019'))
    (flexmock(OSBS)
        .should_receive('get_pod_for_build')
        .with_args(BUILD_ID)
        .and_return(MockedPodResponse()))
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'push_conf', PushConf())
    if docker_registry:
        docker_reg = workflow.push_conf.add_docker_registry('docker.example.com')

        for image in workflow.tag_conf.images:
            tag = image.to_str(registry=False)
            # With pulp involved the v1 digest is the meaningful one,
            # otherwise the v2 digest is.
            if pulp_registries:
                docker_reg.digests[tag] = ManifestDigest(v1=fake_digest(image),
                                                         v2='sha256:not-used')
            else:
                docker_reg.digests[tag] = ManifestDigest(v1='sha256:not-used',
                                                         v2=fake_digest(image))

            if has_config:
                docker_reg.config = {
                    'config': {'architecture': 'x86_64'},
                    'container_config': {}
                }

    for pulp_registry in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
        setattr(workflow, 'exported_image_sequence', [{'path': fp.name}])

    if build_process_failed:
        workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
                                            fail_reason="not built")
    else:
        workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
                                            image_id="id1234")
    workflow.prebuild_plugins_conf = {}
    workflow.prebuild_results[CheckAndSetRebuildPlugin.key] = is_rebuild
    workflow.postbuild_results[PostBuildRPMqaPlugin.key] = [
        "name1;1.0;1;x86_64;0;2000;" + FAKE_SIGMD5.decode() + ";23000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
        # Bug fix: the second entry was missing the ';' separator before the
        # signature field, unlike the first entry's ';23000;'.
        "name2;2.0;1;x86_64;0;3000;" + FAKE_SIGMD5.decode() + ";24000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
    ]

    return tasker, workflow
Code example #20
0
    def workflow(self,
                 build_process_failed=False,
                 registries=None,
                 registry_types=None,
                 platforms=None,
                 platform_descriptors=None,
                 group=True,
                 fail=False,
                 limit_media_types=None):
        """Build a mocked workflow object for manifest-verification tests.

        Configures tag/registry data, stubs registry HTTP responses (both
        via the ``responses`` library and flexmock on requests.Session.get)
        according to the requested media types, and returns a flexmock
        standing in for the workflow.

        :param build_process_failed: reported build failure state
        :param registries: registry config list; a single default registry
                           is used when omitted
        :param registry_types: expected media types for the default registry
        :param platforms: platform list; derived from platform_descriptors
                          when omitted
        :param platform_descriptors: platform->architecture mapping list;
                                     defaults to x86_64/ppc64le/s390x
        :param group: if True, group-manifests result is a manifest list
        :param fail: "bad_results" makes registries return empty/JSON bodies
        :param limit_media_types: optional source_container media-type limit
        :return: flexmock with data/builder/conf/build_process_failed
        """
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        if platform_descriptors is None:
            platform_descriptors = [
                {
                    'platform': 'x86_64',
                    'architecture': 'amd64'
                },
                {
                    'platform': 'ppc64le',
                    'architecture': 'ppc64le'
                },
                {
                    'platform': 's390x',
                    'architecture': 's390x'
                },
            ]

        if platforms is None:
            platforms = [
                descriptor['platform'] for descriptor in platform_descriptors
            ]
        no_amd64 = 'x86_64' not in platforms

        # Explicit registries/types from the caller are kept verbatim;
        # otherwise responses may be narrowed below when amd64 is absent.
        keep_types = False
        if registries or registry_types:
            keep_types = True

        if registries is None and registry_types is None:
            registry_types = [
                MEDIA_TYPE_DOCKER_V2_SCHEMA1, MEDIA_TYPE_DOCKER_V2_SCHEMA2,
                MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST, MEDIA_TYPE_OCI_V1,
                MEDIA_TYPE_OCI_V1_INDEX
            ]

        if registries is None:
            registries = [{
                'url': 'https://container-registry.example.com/v2',
                'version': 'v2',
                'insecure': True,
                'expected_media_types': registry_types
            }]
        conf = {
            'version': 1,
            'registries': registries,
        }

        if limit_media_types is not None:
            conf['source_container'] = {
                'limit_media_types': limit_media_types,
            }

        if platform_descriptors:
            conf['platform_descriptors'] = platform_descriptors

        for registry in registries:

            def get_manifest(request):
                # Echo the first requested media type back as Content-Type.
                media_types = request.headers.get('Accept', '').split(',')
                content_type = media_types[0]

                return 200, {'Content-Type': content_type}, '{}'

            # Bug fix: compile the plain pattern text. The previous code
            # embedded literal "r'...'" characters into the pattern (a
            # mangled raw-string literal), so it could never match a URL.
            url = re.compile(registry['url'] + '.*/manifests/.*')
            responses.add_callback(responses.GET, url, callback=get_manifest)

            expected_types = registry.get('expected_media_types',
                                          registry_types or [])
            if fail == "bad_results":
                response_types = []
            elif not keep_types and no_amd64:
                response_types = [MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST]
            else:
                response_types = expected_types

            reguri = RegistryURI(registry['url']).docker_uri
            if re.match('http(s)?://', reguri):
                urlbase = reguri
            else:
                urlbase = 'https://{0}'.format(reguri)

            actual_v2_url = urlbase + "/v2/foo/manifests/unique-tag"

            if fail == "bad_results":
                response = requests.Response()
                (flexmock(response,
                          raise_for_status=lambda: None,
                          status_code=requests.codes.ok,
                          json={},
                          headers={'Content-Type': 'application/json'}))
                v1_response = response
                v1_oci_response = response
                v1_oci_index_response = response
                v2_response = response
                v2_list_response = response
            else:
                v1_response = self.config_response_none
                v1_oci_response = self.config_response_none
                v1_oci_index_response = self.config_response_none
                v2_response = self.config_response_none
                v2_list_response = self.config_response_none

            if MEDIA_TYPE_DOCKER_V2_SCHEMA1 in response_types:
                v1_response = self.config_response_config_v1
            if MEDIA_TYPE_DOCKER_V2_SCHEMA2 in response_types:
                v2_response = self.config_response_config_v2
            if MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST in response_types:
                v2_list_response = self.config_response_config_v2_list
            if MEDIA_TYPE_OCI_V1 in response_types:
                v1_oci_response = self.config_response_config_oci_v1
            if MEDIA_TYPE_OCI_V1_INDEX in response_types:
                v1_oci_index_response = self.config_response_config_oci_v1_index

            v2_header_v1 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA1}
            v2_header_v2 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA2}
            manifest_header = {'Accept': MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST}

            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=v2_header_v1,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=v2_header_v2,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v2_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers={
                    'Accept': MEDIA_TYPE_OCI_V1
                },
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_oci_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers={
                    'Accept': MEDIA_TYPE_OCI_V1_INDEX
                },
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_oci_index_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=manifest_header,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v2_list_response))

        digests = {'media_type': MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST}
        if not group:
            digests = {'media_type': MEDIA_TYPE_DOCKER_V2_SCHEMA2}
        plugins_results = {
            PLUGIN_CHECK_AND_SET_PLATFORMS_KEY: platforms,
            PLUGIN_GROUP_MANIFESTS_KEY: digests,
        }

        mock_get_retry_session()
        builder = flexmock()
        setattr(builder, 'image_id', 'sha256:(old)')

        # (A stray no-op ``flexmock(tag_conf=tag_conf)`` call was removed
        # here: its result was discarded and it had no side effects.)
        wf_data = ImageBuildWorkflowData()
        wf_data.tag_conf = tag_conf
        wf_data.plugins_results = plugins_results

        return flexmock(data=wf_data,
                        builder=builder,
                        conf=Configuration(raw_config=conf),
                        build_process_failed=build_process_failed)
Code example #21
0
def mock_environment(tmpdir,
                     session=None,
                     name=None,
                     component=None,
                     version=None,
                     release=None,
                     source=None,
                     build_process_failed=False,
                     is_rebuild=True,
                     pulp_registries=0,
                     blocksize=None,
                     task_states=None):
    """Create a mocked (tasker, workflow) pair for koji plugin tests.

    Builds a DockerBuildWorkflow with a fake builder, generated Dockerfile,
    tag/push configuration, exported image tarball and canned plugin
    results, and stubs out subprocess, koji and OSBS interactions.

    :param tmpdir: directory for the generated Dockerfile and image tarball
    :param session: koji client session; a MockedClientSession is created
                    when omitted
    :param name: image name used for primary/unique tags
    :param component: value for the BZComponent/com.redhat.component labels
    :param version: value for the Version/version labels
    :param release: value for the Release/release labels
    :param source: build source; defaults to a git source
    :param build_process_failed: stored as workflow.build_failed
    :param is_rebuild: stored as the CheckAndSetRebuildPlugin result
    :param pulp_registries: number of pulp registries to register
    :param blocksize: accepted for call compatibility; unused here
    :param task_states: task state sequence for the mocked koji session
    :return: (tasker, workflow) tuple
    """
    if session is None:
        # Bug fix: pass the caller-supplied task_states through instead of
        # hard-coding None, so tests can drive the mocked task lifecycle.
        session = MockedClientSession('', task_states=task_states)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())
    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write(
            'FROM base\n'
            'LABEL BZComponent={component} com.redhat.component={component}\n'
            'LABEL Version={version} version={version}\n'
            'LABEL Release={release} release={release}\n'.format(
                component=component, version=version, release=release))
        setattr(workflow.builder, 'df_path', df.name)
    if name and version:
        workflow.tag_conf.add_unique_image(
            'user/test-image:{v}-timestamp'.format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images([
            "{0}:{1}-{2}".format(name, version, release),
            "{0}:{1}".format(name, version), "{0}:latest".format(name)
        ])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub: session)
    flexmock(GitSource)
    (flexmock(OSBS).should_receive('get_build_logs').with_args(
        BUILD_ID).and_return('build logs'))
    (flexmock(OSBS).should_receive('get_pod_for_build').with_args(
        BUILD_ID).and_return(MockedPodResponse()))
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'build_logs', ['docker build log\n'])
    setattr(workflow, 'push_conf', PushConf())
    docker_reg = workflow.push_conf.add_docker_registry('docker.example.com')

    for image in workflow.tag_conf.images:
        tag = image.to_str(registry=False)
        docker_reg.digests[tag] = fake_digest(image)

    for pulp_registry in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
        setattr(workflow, 'exported_image_sequence', [{'path': fp.name}])

    setattr(workflow, 'build_failed', build_process_failed)
    workflow.prebuild_results[CheckAndSetRebuildPlugin.key] = is_rebuild
    workflow.postbuild_results[PostBuildRPMqaPlugin.key] = [
        "name1,1.0,1,x86_64,0,2000," + FAKE_SIGMD5.decode() + ",23000",
        "name2,2.0,1,x86_64,0,3000," + FAKE_SIGMD5.decode() + ",24000",
    ]

    return tasker, workflow
Code example #22
0
def mock_environment(tmpdir, session=None, name=None,
                     component=None, version=None, release=None,
                     source=None, build_process_failed=False,
                     is_rebuild=True, docker_registry=True,
                     pulp_registries=0, blocksize=None,
                     task_states=None, additional_tags=None,
                     has_config=None,
                     logs_return_bytes=True):
    """Create a mocked (tasker, workflow) pair for orchestrated koji tests.

    Builds a DockerBuildWorkflow with a fake builder, generated Dockerfile,
    tag/push configuration, exported image tarball, per-platform worker
    build annotations, canned worker metadata and plugin workspace, and
    stubs out subprocess, koji and OSBS interactions.

    :param tmpdir: directory for the generated Dockerfile and image tarball
    :param session: koji client session; a MockedClientSession is created
                    when omitted
    :param name: image name used for primary/unique tags
    :param component: value for the BZComponent/com.redhat.component labels
    :param version: value for the Version/version labels
    :param release: value for the Release/release labels
    :param source: build source; defaults to a git source
    :param build_process_failed: if True, workflow.build_result is a failure
    :param is_rebuild: stored as the CheckAndSetRebuildPlugin result
    :param docker_registry: if True, add a docker registry with fake digests
    :param pulp_registries: number of pulp registries to register
    :param blocksize: accepted for call compatibility; unused here
    :param task_states: task state sequence for the mocked koji session
    :param additional_tags: extra tags added as primary images
    :param has_config: if True, attach a config blob to the docker registry
    :param logs_return_bytes: if True, OSBS build logs are returned as bytes
    :return: (tasker, workflow) tuple
    """
    if session is None:
        # Bug fix: pass the caller-supplied task_states through instead of
        # hard-coding None, so tests can drive the mocked task lifecycle.
        session = MockedClientSession('', task_states=task_states)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())
    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write('FROM base\n'
                 'LABEL BZComponent={component} com.redhat.component={component}\n'
                 'LABEL Version={version} version={version}\n'
                 'LABEL Release={release} release={release}\n'
                 .format(component=component, version=version, release=release))
        setattr(workflow.builder, 'df_path', df.name)
    if name and version:
        workflow.tag_conf.add_unique_image('user/test-image:{v}-timestamp'
                                           .format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images(["{0}:{1}-{2}".format(name,
                                                                   version,
                                                                   release),
                                              "{0}:{1}".format(name, version),
                                              "{0}:latest".format(name)])

    if additional_tags:
        workflow.tag_conf.add_primary_images(["{0}:{1}".format(name, tag)
                                              for tag in additional_tags])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub, opts: session)
    flexmock(GitSource)
    if logs_return_bytes:
        logs = b'build logs - \xe2\x80\x98 \xe2\x80\x97 \xe2\x80\x99'
    else:
        logs = 'build logs - \u2018 \u2017 \u2019'
    (flexmock(OSBS)
        .should_receive('get_build_logs')
        .with_args(BUILD_ID)
        .and_return(logs))
    (flexmock(OSBS)
        .should_receive('get_pod_for_build')
        .with_args(BUILD_ID)
        .and_return(MockedPodResponse()))
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'push_conf', PushConf())
    if docker_registry:
        docker_reg = workflow.push_conf.add_docker_registry('docker.example.com')

        for image in workflow.tag_conf.images:
            tag = image.to_str(registry=False)
            # With pulp involved the v1 digest is the meaningful one,
            # otherwise the v2 digest is.
            if pulp_registries:
                docker_reg.digests[tag] = ManifestDigest(v1=fake_digest(image),
                                                         v2='sha256:not-used')
            else:
                docker_reg.digests[tag] = ManifestDigest(v1='sha256:not-used',
                                                         v2=fake_digest(image))

            if has_config:
                docker_reg.config = {
                    'config': {'architecture': 'x86_64'},
                    'container_config': {}
                }

    for pulp_registry in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
        setattr(workflow, 'exported_image_sequence', [{'path': fp.name}])

    # Worker-build annotations for the two mocked platforms.
    annotations = {
        'worker-builds': {
            'x86_64': {
                'build': {
                    'build-name': 'build-1-x64_64',
                },
                'metadata_fragment': 'configmap/build-1-x86_64-md',
                'metadata_fragment_key': 'metadata.json',
            },
            'ppc64le': {
                'build': {
                    'build-name': 'build-1-ppc64le',
                },
                'metadata_fragment': 'configmap/build-1-ppc64le-md',
                'metadata_fragment_key': 'metadata.json',
            }
        }
    }

    if build_process_failed:
        workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
                                            fail_reason="not built")
    else:
        workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
                                            image_id="id1234",
                                            annotations=annotations)
    workflow.prebuild_plugins_conf = {}
    workflow.prebuild_results[CheckAndSetRebuildPlugin.key] = is_rebuild
    workflow.postbuild_results[PostBuildRPMqaPlugin.key] = [
        "name1;1.0;1;x86_64;0;2000;" + FAKE_SIGMD5.decode() + ";23000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
        # Bug fix: the second entry was missing the ';' separator before the
        # signature field, unlike the first entry's ';23000;'.
        "name2;2.0;1;x86_64;0;3000;" + FAKE_SIGMD5.decode() + ";24000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
    ]

    # Canned content-generator metadata as produced by a worker build.
    workflow.postbuild_results[FetchWorkerMetadataPlugin.key] = {
        'x86_64': {
            'buildroots': [
                {
                    'container': {
                        'type': 'docker',
                        'arch': 'x86_64'
                    },
                    'extra': {
                        'osbs': {
                            'build_id': '12345',
                            'builder_image_id': '67890'
                        }
                    },
                    'content_generator': {
                        'version': '1.6.23',
                        'name': 'atomic-reactor'
                    },
                    'host': {
                        'os': 'Red Hat Enterprise Linux Server 7.3 (Maipo)',
                        'arch': 'x86_64'
                    },
                    'components': [
                        {
                            'name': 'perl-Net-LibIDN',
                            'sigmd5': '1dba38d073ea8f3e6c99cfe32785c81e',
                            'arch': 'x86_64',
                            'epoch': None,
                            'version': '0.12',
                            'signature': '199e2f91fd431d51',
                            'release': '15.el7',
                            'type': 'rpm'
                        },
                        {
                            'name': 'tzdata',
                            'sigmd5': '2255a5807ca7e4d7274995db18e52bea',
                            'arch': 'noarch',
                            'epoch': None,
                            'version': '2017b',
                            'signature': '199e2f91fd431d51',
                            'release': '1.el7',
                            'type': 'rpm'
                        },
                    ],
                    'tools': [
                        {
                            'version': '1.12.6',
                            'name': 'docker'
                        }
                    ],
                    'id': 1
                }
            ],
            'metadata_version': 0,
            'output': [
                {
                    'type': 'log',
                    'arch': 'noarch',
                    'filename': 'openshift-final.log',
                    'filesize': 106690,
                    'checksum': '2efa754467c0d2ea1a98fb8bfe435955',
                    'checksum_type': 'md5',
                    'buildroot_id': 1
                },
                {
                    'type': 'log',
                    'arch': 'noarch',
                    'filename': 'build.log',
                    'filesize': 1660,
                    'checksum': '8198de09fc5940cf7495e2657039ee72',
                    'checksum_type': 'md5',
                    'buildroot_id': 1
                },
                {
                    'extra': {
                        'image': {
                            'arch': 'x86_64'
                        },
                        'docker': {
                            'repositories': [
                                'brew-pulp-docker:8888/myproject/hello-world:0.0.1-9',
                            ],
                            'parent_id': 'sha256:bf203442',
                            'id': '123456',
                        }
                    },
                    'checksum': '58a52e6f3ed52818603c2744b4e2b0a2',
                    'filename': 'test.x86_64.tar.gz',
                    'buildroot_id': 1,
                    'components': [
                        {
                            'name': 'tzdata',
                            'sigmd5': 'd9dc4e4f205428bc08a52e602747c1e9',
                            'arch': 'noarch',
                            'epoch': None,
                            'version': '2016d',
                            'signature': '199e2f91fd431d51',
                            'release': '1.el7',
                            'type': 'rpm'
                        },
                        {
                            'name': 'setup',
                            'sigmd5': 'b1e5ca72c71f94112cd9fb785b95d4da',
                            'arch': 'noarch',
                            'epoch': None,
                            'version': '2.8.71',
                            'signature': '199e2f91fd431d51',
                            'release': '6.el7',
                            'type': 'rpm'
                        },

                    ],
                    'type': 'docker-image',
                    'checksum_type': 'md5',
                    'arch': 'x86_64',
                    'filesize': 71268781
                }
            ]
        }
    }
    workflow.plugin_workspace = {
        OrchestrateBuildPlugin.key: {
            WORKSPACE_KEY_UPLOAD_DIR: 'test-dir',
            WORKSPACE_KEY_BUILD_INFO: {
                'x86_64': BuildInfo('help.md')
            }
        }
    }

    return tasker, workflow