Пример #1
0
    def test_save_and_load(self, tmpdir):
        """Round-trip test: save workflow data to a context dir, load it back.

        Verifies that:
        * the dump file is created,
        * the dumped JSON validates against the workflow_data schema,
        * values that need custom serialization (ImageName objects nested
          inside dicts and lists in plugins_results) survive the round trip,
        * every field set on the original object equals the loaded one.
        """
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test object in dict values is serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                    # Such object in a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load and verify the loaded data
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
        # Fix: koji_upload_files was set in the fixture data above but its
        # round trip was never asserted before.
        assert wf_data.koji_upload_files == loaded_wf_data.koji_upload_files
Пример #2
0
 def test_as_dict(self):
     """as_dict() must expose primary, unique and floating image lists."""
     floating_tags = ('ns/img:latest', 'ns1/img2:devel')

     conf = TagConf()
     conf.add_primary_image('r.fp.o/f:35')
     for tag in floating_tags:
         conf.add_floating_image(tag)

     assert conf.as_dict() == {
         'primary_images': [ImageName.parse('r.fp.o/f:35')],
         'unique_images': [],
         'floating_images': [ImageName.parse(tag) for tag in floating_tags],
     }
Пример #3
0
def _rpm_component(name, version, release, sigmd5):
    """Return a minimal 'rpm'-type image-component record.

    All sample components share arch 'noarch' and carry no signature or
    epoch, so only the varying fields are parameterized.
    """
    return {
        'type': 'rpm',
        'name': name,
        'version': version,
        'release': release,
        'arch': 'noarch',
        'sigmd5': sigmd5,
        'signature': None,
        'epoch': None,
    }


def get_workflow_data_json():
    """Build sample ImageBuildWorkflowData and return it as parsed JSON.

    The sample deliberately includes ImageName objects nested inside dict
    values and lists in plugins_results so that serialization through
    WorkflowDataEncoder is exercised.

    :return: the workflow data serialized with WorkflowDataEncoder and
        parsed back into plain JSON-compatible Python objects.
    :rtype: dict
    """
    tag_conf = TagConf()
    tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
    tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

    wf_data = ImageBuildWorkflowData(
        dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
        # Test object in dict values is serialized
        plugins_results={
            "image_build": {
                "logs": ["Build succeeds."]
            },
            "tag_and_push": [
                # Such object in a list should be handled properly.
                ImageName(registry="localhost:5000",
                          repo='image',
                          tag='latest'),
            ],
            "plugin_a": {
                'parent-images-koji-builds': {
                    ImageName(repo='base', tag='latest').to_str(): {
                        'id': 123456789,
                        'nvr': 'base-image-1.0-99',
                        'state': 1,
                    },
                },
            },
        },
        tag_conf=tag_conf,
        koji_upload_files=[
            {
                "local_filename": "/path/to/build1.log",
                "dest_filename": "x86_64-build.log",
            },
            {
                "local_filename": "/path/to/dir1/remote-source.tar.gz",
                "dest_filename": "remote-source.tar.gz",
            },
        ])

    # Both architectures carry an identical component list in the sample
    # data; build it per arch instead of hand-duplicating ~60 lines of
    # literal dicts (the original copies were byte-identical).
    wf_data.image_components = {
        arch: [
            _rpm_component('python-docker-py', '1.3.1', '1.fc24',
                           '7c1f60d8cde73e97a45e0c489f4a3b26'),
            _rpm_component('fedora-repos-rawhide', '24', '0.1',
                           'd41df1e059544d906363605d47477e60'),
            _rpm_component('gpg-pubkey-doc', '1.0', '1',
                           '00000000000000000000000000000000'),
        ]
        for arch in ('x86_64', 'ppc64le')
    }

    # An in-memory dumps/loads round trip yields exactly the same result as
    # writing to a temporary file and reading it back, without touching disk.
    return json.loads(json.dumps(wf_data.as_dict(), cls=WorkflowDataEncoder))