# Example 1
    def instantiate_plugin(self,
                           tmpdir,
                           parent_labels,
                           current_scope,
                           base_from_scratch=False):
        """Create a DistributionScopePlugin wired to a mocked workflow.

        Writes a minimal Dockerfile into *tmpdir* (with an optional
        distribution-scope LABEL) and fakes the base image inspection so
        the plugin sees *parent_labels*, unless building from scratch.
        """
        workflow = flexmock()
        # direct attribute assignment instead of setattr with a constant
        # attribute name (flake8-bugbear B010)
        workflow.builder = flexmock()
        filename = os.path.join(str(tmpdir), 'Dockerfile')
        with open(filename, 'wt') as df:
            df.write('FROM scratch\n')
            if current_scope:
                df.write('LABEL distribution-scope={}\n'.format(current_scope))

        workflow.builder.df_path = filename

        # A from-scratch build has no parent image to inspect; otherwise
        # expose the given labels through the fake inspection data.
        if base_from_scratch:
            workflow.builder.base_image_inspect = {}
            workflow.builder.dockerfile_images = DockerfileImages(['scratch'])
        else:
            workflow.builder.base_image_inspect = {
                INSPECT_CONFIG: {
                    'Labels': parent_labels,
                }
            }
            workflow.builder.dockerfile_images = DockerfileImages([])

        plugin = DistributionScopePlugin(None, workflow)
        plugin.log = logging.getLogger('plugin')
        return plugin
    def test_parent_image_injected(self, caplog, workflow, base_from_scratch, custom_base_image):
        """Verify the plugin replaces the base image only for regular builds.

        FROM-scratch and custom base image builds must keep their original
        base image and log why no parent image was injected.
        """
        koji_session()

        wf_data = workflow.data

        if base_from_scratch:
            wf_data.dockerfile_images = DockerfileImages(['scratch'])
        elif custom_base_image:
            wf_data.dockerfile_images = DockerfileImages(['koji/image-build'])

        # remember the pre-plugin base image so replacement can be detected
        previous_parent_image = wf_data.dockerfile_images.base_image

        self.run_plugin_with_args(workflow, base_from_scratch=base_from_scratch,
                                  custom_base_image=custom_base_image)
        if base_from_scratch:
            assert str(previous_parent_image) == str(wf_data.dockerfile_images.base_image)

            log_msg = "from scratch can't inject parent image"
            assert log_msg in caplog.text
        elif custom_base_image:
            assert str(previous_parent_image) == str(wf_data.dockerfile_images.base_image)

            log_msg = "custom base image builds can't inject parent image"
            assert log_msg in caplog.text
        else:
            # regular build: the base image must have been swapped out
            assert str(previous_parent_image) != str(wf_data.dockerfile_images.base_image)
# Example 3
    def test_update_dockerfile_images_from_config(self, tmp_path, images_exist, organization):
        """Parent images get the source registry prefix (and the organization
        when one is configured); empty image sets stay empty."""
        config = REQUIRED_CONFIG
        if organization:
            config += "\nregistries_organization: " + organization

        config_yaml = tmp_path / 'config.yaml'
        config_yaml.write_text(dedent(config), "utf-8")

        if images_exist:
            parent_images = ['parent:latest', 'base:latest']
            prefix = ('source_registry.com/organization/' if organization
                      else 'source_registry.com/')
            expect_images = [ImageName.parse(prefix + 'base:latest'),
                             ImageName.parse(prefix + 'parent:latest')]
        else:
            parent_images = []

        dockerfile_images = DockerfileImages(parent_images)

        conf = Configuration(config_path=str(config_yaml))
        conf.update_dockerfile_images_from_config(dockerfile_images)

        if not images_exist:
            assert not dockerfile_images
        else:
            assert len(dockerfile_images) == 2
            assert dockerfile_images.keys() == expect_images
    def instantiate_plugin(self,
                           workflow,
                           parent_labels,
                           current_scope,
                           base_from_scratch=False):
        """Create a DistributionScopePlugin against a mocked workflow.

        Writes a minimal Dockerfile (with an optional distribution-scope
        LABEL), initializes build dirs, and stubs base image inspection to
        return *parent_labels* unless building from scratch.
        """
        with open(Path(workflow.source.path) / "Dockerfile", 'wt') as df:
            df.write('FROM scratch\n')
            if current_scope:
                df.write('LABEL distribution-scope={}\n'.format(current_scope))

        workflow.build_dir.init_build_dirs(["x86_64"], workflow.source)

        # stub inspection: parent labels normally, empty dict for scratch
        if not base_from_scratch:
            (flexmock(workflow.imageutil).should_receive(
                'base_image_inspect').and_return(
                    {INSPECT_CONFIG: {
                        'Labels': parent_labels
                    }}))
        else:
            flexmock(workflow.imageutil).should_receive(
                'base_image_inspect').and_return({})

        dockerfile_images = DockerfileImages([])
        if base_from_scratch:
            dockerfile_images = DockerfileImages(['scratch'])
        workflow.data.dockerfile_images = dockerfile_images

        plugin = DistributionScopePlugin(workflow)
        plugin.log = logging.getLogger('plugin')
        return plugin
# Example 5
 def update_dockerfile_images_from_config(
         self, dockerfile_images: DockerfileImages) -> None:
     """
     Set source registry and organization in dockerfile images.

     No-op when there are no actual images (e.g. just 'scratch').
     """
     if not dockerfile_images:
         return
     registry_uri = self.source_registry['uri'].docker_uri
     dockerfile_images.set_source_registry(registry_uri,
                                           self.registries_organization)
def test_multiple_repourls(workflow, build_dir, caplog, base_from_scratch,
                           parent_images, inject_proxy, repos, filenames):
    """Inject several yum repo URLs; repo files are written only for builds
    with a parent image, with a proxy line added when requested."""
    workflow = prepare(workflow, build_dir, yum_repourls={'x86_64': repos})

    dockerfile_images = []
    if parent_images:
        dockerfile_images.append('parent_image:latest')
    if base_from_scratch:
        dockerfile_images.append('scratch')
    workflow.data.dockerfile_images = DockerfileImages(dockerfile_images)
    repo_content = '''[repo]\n'''

    # serve the same stub repo file body for every requested URL
    for repofile_url in repos:
        responses.add(responses.GET, repofile_url, body=repo_content)

    (MockEnv(workflow).for_plugin(InjectYumReposPlugin.key,
                                  args={
                                      'inject_proxy': inject_proxy
                                  }).create_runner().run())

    repos_path = workflow.build_dir.any_platform.path / RELATIVE_REPOS_PATH

    if not parent_images:
        # no parent image: nothing to inject, repos dir must not exist
        assert InjectYumReposPlugin.key is not None
        assert not repos_path.exists()
    else:
        files = os.listdir(repos_path)
        assert len(files) == 2
        for filename in filenames:
            with open(repos_path / filename, 'r') as f:
                content = f.read()
            if inject_proxy:
                assert 'proxy = %s\n\n' % inject_proxy in content
            else:
                assert 'proxy' not in content
def test_ignore_custom_parent_image(updated_df_images, dockerfile,
                                    expected_dockerfile,
                                    workflow: DockerBuildWorkflow):
    """Test plugin works well after add_filesystem plugin run."""
    # Initialize firstly, otherwise the next __setitem__ call will fail.
    workflow.data.dockerfile_images = DockerfileImages(
        [pullable for pullable, _ in updated_df_images])

    # record any locally-built replacement for each parent image
    for pullable, local_parent in updated_df_images:
        if local_parent is not None:
            workflow.data.dockerfile_images[pullable] = local_parent
    workflow.build_dir.init_build_dirs(['x86_64'], workflow.source)

    def _update_dockerfile(build_dir: BuildDir):
        # write the test Dockerfile into every per-platform build dir
        build_dir.dockerfile.content = dockerfile

    workflow.build_dir.for_each_platform(_update_dockerfile)

    run_plugin(workflow)

    def _assert(build_dir: BuildDir):
        # every platform must end up with the expected Dockerfile content
        assert build_dir.dockerfile.content.strip(
        ) == expected_dockerfile.strip()

    workflow.build_dir.for_each_platform(_assert)
# Example 8
def test_compare_components_plugin(workflow, caplog, base_from_scratch,
                                   mismatch, exception, fail):
    """Mismatched RPM components fail the build unless the package is listed
    as a comparison exception; FROM-scratch builds skip the check."""
    mock_workflow(workflow)
    components_per_arch = mock_components()

    # example data has 2 log items before component item hence output[2]
    component = components_per_arch['ppc64le'][0]
    if mismatch:
        component['version'] = 'bacon'
    if exception:
        # exempt this package from the cross-arch comparison
        workflow.conf.conf = {
            'version': 1,
            'package_comparison_exceptions': [component['name']]
        }

    workflow.data.postbuild_results[
        PostBuildRPMqaPlugin.key] = components_per_arch
    if base_from_scratch:
        workflow.data.dockerfile_images = DockerfileImages(['scratch'])

    runner = PostBuildPluginsRunner(workflow, [{
        'name': PLUGIN_COMPARE_COMPONENTS_KEY,
        "args": {}
    }])

    if fail and not base_from_scratch:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
        if base_from_scratch:
            log_msg = "Skipping comparing components: unsupported for FROM-scratch images"
            assert log_msg in caplog.text
# Example 9
 def test_default_parameters(self, base_from_scratch):
     """Run the plugin with empty args; default squash kwargs are expected."""
     wf = mock_workflow()
     self.should_squash_with_kwargs(wf, base_from_scratch=base_from_scratch)
     if base_from_scratch:
         wf.builder.dockerfile_images = DockerfileImages(['scratch'])
     self.run_plugin_with_args(wf, {})
# Example 10
 def __init__(self):
     """Mock builder: docker tasker without retries and a fixed base image."""
     self.tasker = DockerTasker(retry_times=0)
     self.dockerfile_images = DockerfileImages(['Fedora:22'])
     self.image_id = 'image_id'
     self.image = 'image'
     self.df_path = 'df_path'
     self.df_dir = 'df_dir'
# Example 11
    def test_repository_selection(self, workflow, organization,
                                  archive_registry, repositories, selected):
        """The repository picked from the koji archives is rewritten to the
        source registry, enclosed in the organization when one is set."""
        archive_repo_template = archive_registry + '/fedora{}'
        archives = [{
            'id': 1,
            'extra': {
                'docker': {
                    'repositories': [
                        archive_repo_template.format(repo)
                        for repo in repositories
                    ]
                }
            }
        }]
        enclosed_repo_template = 'source_registry.com/{}/fedora{}'
        repo_template = 'source_registry.com/fedora{}'

        koji_session(archives=archives)
        workflow.data.dockerfile_images = DockerfileImages(
            ['spam.com/fedora:some_tag'])
        self.run_plugin_with_args(workflow, organization=organization)
        if organization:
            selected_repo = enclosed_repo_template.format(
                organization, selected)
        else:
            selected_repo = repo_template.format(selected)

        assert str(workflow.data.dockerfile_images.base_image) == selected_repo
# Example 12
    def test_repository_from_koji_build(self, workflow, registry_in_koji,
                                        repositories, selected):
        """The pull spec from the koji build's 'extra' metadata takes
        precedence over archive repositories and is moved to the source
        registry."""
        # Populate archives to ensure koji build takes precedence
        archives = [{
            'id': 1,
            'extra': {
                'docker': {
                    'repositories': [
                        'spam.com/notselected/fedora{}'.format(repo)
                        for repo in repositories
                    ]
                }
            }
        }]

        koji_repo_template = registry_in_koji + '/fedora{}'
        koji_build_info = copy.deepcopy(KOJI_BUILD_INFO)
        koji_build_info['extra'] = {
            'image': {
                'index': {
                    'pull':
                    [koji_repo_template.format(repo) for repo in repositories]
                }
            }
        }

        repo_template = 'source_registry.com/fedora{}'
        koji_session(archives=archives, koji_build_info=koji_build_info)
        workflow.data.dockerfile_images = DockerfileImages(
            ['spam.com/fedora:some_tag'])
        self.run_plugin_with_args(workflow)
        assert str(workflow.data.dockerfile_images.base_image
                   ) == repo_template.format(selected)
# Example 13
 def __init__(self):
     """Stub builder with no parent images and a null source."""
     self.image_id = INPUT_IMAGE
     self.source = Y()
     self.source.dockerfile_path = None
     self.source.path = None
     self.dockerfile_images = DockerfileImages([])
     self.df_dir = None
def test_pull_raises_retry_error(workflow, caplog):
    """When every pull retry fails, the runner raises and the failed
    pullspec is logged."""
    if MOCK:
        mock_docker(remember_images=True)

    tasker = DockerTasker(retry_times=1)
    workflow.builder = MockBuilder()
    image_name = ImageName.parse(IMAGE_RAISE_RETRYGENERATOREXCEPTION)
    base_image_str = "{}/{}:{}".format(SOURCE_REGISTRY, image_name.repo,
                                       'some')
    source_registry = image_name.registry
    workflow.builder.dockerfile_images = DockerfileImages([base_image_str])
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1,
                       'source_registry': {'url': source_registry,
                                           'insecure': True}})

    runner = PreBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': PullBaseImagePlugin.key,
            'args': {},
        }],
    )

    with pytest.raises(Exception):
        runner.run()

    # the image the plugin should have attempted to pull
    exp_img = ImageName.parse(base_image_str)
    exp_img.registry = source_registry
    assert 'failed to pull image: {}'.format(exp_img.to_str()) in caplog.text
# Example 15
 def set_dockerfile_images(self, images: Union[DockerfileImages,
                                               List[str]]):
     """Set dockerfile images in the workflow; returns self for chaining."""
     coerced = (images if isinstance(images, DockerfileImages)
                else DockerfileImages(images))
     self.workflow.data.dockerfile_images = coerced
     return self
# Example 16
def test_parent_images_to_str(workflow, caplog):
    """Resolved parents are stringified; unresolved ones are skipped with a
    warning in the log."""
    images = DockerfileImages(['fedora:latest', 'bacon'])
    images['fedora:latest'] = "spam"
    workflow.data.dockerfile_images = images
    assert workflow.parent_images_to_str() == {"fedora:latest": "spam:latest"}
    assert "None in: base bacon:latest has parent None" in caplog.text
# Example 17
    def test_save_and_load(self, tmpdir):
        """Test save workflow data and then load them back properly."""
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        # build a workflow data object exercising the serializer's edge
        # cases: objects nested in dicts, lists, and plain values
        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test object in dict values is serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                    # Such object in a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load and verify the loaded data
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        # round-trip must preserve all compared attributes
        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
# Example 18
 def __init__(self):
     """Mock builder whose base image already lives on the source registry."""
     self.tasker = DockerTasker()
     self.dockerfile_images = DockerfileImages(
         ['source_registry.com/fedora:26'])
     self.image_id = 'image_id'
     self.image = 'image'
     self.df_path = 'df_path'
     self.df_dir = 'df_dir'
# Example 19
def workflow(workflow):
    """Fixture: preconfigure the workflow with a stub base image inspection."""
    wf_data = workflow.data
    wf_data.dockerfile_images = DockerfileImages(['Fedora:22'])
    wf_data.image_id = 'image_id'
    flexmock(workflow, image='image')
    inspect_result = {'Id': BASE_IMAGE_ID}
    (flexmock(workflow.imageutil)
        .should_receive('base_image_inspect')
        .and_return(inspect_result))
    return workflow
# Example 20
    def set_df_path(self, path):
        """Set the Dockerfile path and derive parent image data from it.

        Parses the Dockerfile, records its parent images, rejects multiple
        distinct custom base images, and validates that every
        ``COPY --from=<X>`` refers to a previously declared build stage.
        """
        self._df_path = path
        dfp = df_parser(path)
        if dfp.baseimage is None:
            raise RuntimeError("no base image specified in Dockerfile")

        self.dockerfile_images = DockerfileImages(dfp.parent_images)
        logger.debug("base image specified in dockerfile = '%s'", dfp.baseimage)
        logger.debug("parent images specified in dockerfile = '%s'", dfp.parent_images)

        # collect custom (image-build) base images; more than one distinct
        # one is unsupported
        custom_base_images = set()
        for image in dfp.parent_images:
            image_name = ImageName.parse(image)
            image_str = image_name.to_str()
            if base_image_is_custom(image_str):
                custom_base_images.add(image_str)

        if len(custom_base_images) > 1:
            raise NotImplementedError("multiple different custom base images"
                                      " aren't allowed in Dockerfile")

        # validate user has not specified COPY --from=image
        builders = []
        for stmt in dfp.structure:
            if stmt['instruction'] == 'FROM':
                # extract "bar" from "foo as bar" and record as build stage
                match = re.search(r'\S+ \s+  as  \s+ (\S+)', stmt['value'], re.I | re.X)
                builders.append(match.group(1) if match else None)
            elif stmt['instruction'] == 'COPY':
                match = re.search(r'--from=(\S+)', stmt['value'], re.I)
                if not match:
                    continue
                stage = match.group(1)
                # error unless the --from is the index or name of a stage we've seen
                if any(stage in [str(idx), builder] for idx, builder in enumerate(builders)):
                    continue
                raise RuntimeError(dedent("""\
                    OSBS does not support COPY --from unless it matches a build stage.
                    Dockerfile instruction was:
                      {}
                    To use an image with COPY --from, specify it in a stage with FROM, e.g.
                      FROM {} AS source
                      FROM ...
                      COPY --from=source <src> <dest>
                    """).format(stmt['content'], stage))
# Example 21
def workflow(workflow, source_dir):
    """Fixture: workflow with a resolved base image and stubbed inspections."""
    workflow.source = MockSource(source_dir)
    df_images = DockerfileImages(['base:latest'])
    df_images['base:latest'] = ImageName.parse('base:stubDigest')
    workflow.data.dockerfile_images = df_images
    inspection = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS.copy()}}
    imageutil_mock = flexmock(workflow.imageutil)
    imageutil_mock.should_receive('base_image_inspect').and_return(inspection)
    imageutil_mock.should_receive('get_inspect_for_image').and_return(inspection)
    workflow.data.parent_images_digests = {'base:stubDigest': {V2_LIST: 'stubDigest'}}
    return workflow
# Example 22
def test_ensure_workflow_data_is_saved_in_various_conditions(
        build_result, build_dir, dummy_source, tmpdir):
    """However a build ends (success, raised error, or SIGTERM cancel), the
    workflow data must be dumped to the context dir and be loadable."""
    context_dir = tmpdir.join("context_dir").mkdir()
    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params).should_receive("source").and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    if build_result == "normal_return":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").once())

        task.execute()

    elif build_result == "error_raised":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").and_raise(BuildCanceledException))

        with pytest.raises(BuildCanceledException):
            task.execute()

    elif build_result == "terminated":
        # Start the task.execute in a separate process and terminate it.
        # This simulates the Cancel behavior by TERM signal.

        def _build_docker_image(self, *args, **kwargs):
            def _cancel_build(*args, **kwargs):
                raise BuildCanceledException()

            signal.signal(signal.SIGTERM, _cancel_build)
            # Whatever how long to sleep, just meaning it's running.
            time.sleep(5)

        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").replace_with(_build_docker_image))

        proc = multiprocessing.Process(target=task.execute)
        proc.start()

        # wait a short a while for the task.execute to run in the separate process.
        time.sleep(0.3)
        proc.terminate()

    # in every scenario the workflow data file must exist and load cleanly
    assert context_dir.join("workflow.json").exists()

    wf_data = ImageBuildWorkflowData()
    wf_data.load_from_dir(ContextDir(Path(context_dir)))
    # As long as the data is loaded successfully, just check some
    # attributes to check the data.
    assert DockerfileImages() == wf_data.dockerfile_images
    assert {} == wf_data.prebuild_results
# Example 23
    def __init__(self):
        """Mock builder whose tasker yields non-ASCII build log output."""
        mock_docker()
        self.tasker = DockerTasker()
        self.dockerfile_images = DockerfileImages(['fedora:25'])
        self.image_id = 'image_id'
        self.image = INPUT_IMAGE
        self.df_path = 'df_path'
        self.df_dir = 'df_dir'

        def simplegen(x, y):
            # single-chunk build log containing a non-ASCII character
            yield "some\u2018".encode('utf-8')

        flexmock(self.tasker, build_image_from_path=simplegen)
# Example 24
    def __init__(self, source, image, **kwargs):
        """
        Set up builder state: tasker, source, target image name, and parent
        image bookkeeping; parse the Dockerfile when the source provides one.
        """
        LastLogger.__init__(self)
        BuilderStateMachine.__init__(self)

        print_version_of_tools()

        self.tasker = ContainerTasker()

        # arguments for build
        self.source = source
        # configuration of source_registy and pull_registries with insecure and
        # dockercfg_path, by registry key
        self.pull_registries = {}
        self.dockerfile_images = DockerfileImages([])
        self._base_image_inspect = None
        self.parents_pulled = False
        self._parent_images_inspect = {}  # locally available image => inspect
        self.parent_images_digests = {}
        self.image_id = None
        self.built_image_info = None
        self.image = ImageName.parse(image)

        # get info about base image from dockerfile
        build_file_path, build_file_dir = self.source.get_build_file_path()

        self.df_dir = build_file_dir
        self._df_path = None
        self.original_df = None
        self.buildargs = {}  # --buildargs for container build

        # If the Dockerfile will be entirely generated from the container.yaml
        # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
        # and set the base image
        if build_file_path.endswith(DOCKERFILE_FILENAME):
            self.set_df_path(build_file_path)
def test_parent_images_to_str(tmpdir, caplog):
    """Resolved parents are stringified; unresolved ones are skipped with a
    warning in the log."""
    if MOCK:
        mock_docker()

    source = {
        'provider': 'path',
        'uri': 'file://' + DOCKERFILE_OK_PATH,
        'tmpdir': str(tmpdir)
    }
    b = InsideBuilder(get_source_instance_for(source), 'built-img')
    # 'fedora:latest' is resolved to "spam"; 'bacon' stays unresolved
    b.dockerfile_images = DockerfileImages(['fedora:latest', 'bacon'])
    b.dockerfile_images['fedora:latest'] = "spam"
    expected_results = {"fedora:latest": "spam:latest"}
    assert b.parent_images_to_str() == expected_results
    assert "None in: base bacon:latest has parent None" in caplog.text
# Example 26
def prepare(workflow,
            build_dir,
            inherited_user='',
            dockerfile=DEFAULT_DOCKERFILE,
            scratch=False,
            platforms=None,
            include_koji_repo=False,
            yum_proxy=None,
            koji_ssl_certs=False,
            root_url=ROOT,
            yum_repourls=None):
    """Prepare a mocked workflow for yum repo injection tests.

    Writes the Dockerfile, stubs base image inspection, and optionally
    wires up a mocked koji session (with SSL certs and a yum proxy).
    """
    if yum_repourls is None:
        yum_repourls = {}
    if not platforms:
        platforms = ['x86_64']
    if koji_ssl_certs:
        # fake cert files expected when ssl_certs_dir is configured
        build_dir.joinpath("cert").write_text("cert", "utf-8")
        build_dir.joinpath("serverca").write_text("serverca", "utf-8")
    workflow.user_params['scratch'] = scratch
    workflow.data.prebuild_results[
        PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = platforms
    workflow.source = MockSource(build_dir)
    inspect_data = {INSPECT_CONFIG: {'User': inherited_user}}
    flexmock(workflow.imageutil).should_receive(
        'base_image_inspect').and_return(inspect_data)
    with open(workflow.source.dockerfile_path, 'w') as f:
        f.write(dockerfile)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    df = df_parser(str(build_dir))
    df.content = dockerfile
    workflow.data.dockerfile_images = DockerfileImages(df.parent_images)
    if include_koji_repo:
        session = MockedClientSession(hub='', opts=None)
        workflow.koji_session = session
        flexmock(koji, ClientSession=session, PathInfo=MockedPathInfo)

        workflow.conf.conf = {'version': 1, 'yum_proxy': yum_proxy}
        add_koji_map_in_workflow(
            workflow,
            hub_url='',
            root_url=root_url,
            ssl_certs_dir=str(build_dir) if koji_ssl_certs else None)
    workflow.data.prebuild_results[PLUGIN_RESOLVE_COMPOSES_KEY] = {
        'composes': [],
        'include_koji_repo': include_koji_repo,
        'yum_repourls': yum_repourls,
    }
    return workflow
# Example 27
class X(object):
    """Minimal builder stand-in with one resolved parent image."""
    image_id = INPUT_IMAGE
    source = Y()
    source.dockerfile_path = None
    source.path = None
    dockerfile_images = DockerfileImages(['qwe:asd'])
    dockerfile_images['qwe:asd'] = "sha256:spamneggs"

    def parent_images_to_str(self):
        """Map each parent image to its resolved id, with a placeholder for
        images that were never resolved."""
        return {
            key.to_str(): (val.to_str() if val else 'sha256:bacon')
            for key, val in self.dockerfile_images.items()
        }
# Example 28
def mock_env(workflow,
             source_dir: Path,
             labels=None,
             flatpak=False,
             dockerfile_f=mock_dockerfile,
             isolated=None):
    """Mock test environment

    :param workflow: a DockerBuildWorkflow object for a specific test.
    :type workflow: DockerBuildWorkflow
    :param source_dir: path to the source directory holding the dockerfile to be created.
    :type source_dir: pathlib.Path
    :param labels: an iterable labels set for testing operator bundle or appregistry build.
    :type labels: iterable[str]
    :param bool flatpak: a flag to indicate whether the test is for a flatpak build.
    :param callable dockerfile_f: a function to create fake dockerfile. Different test could pass a
        specific function for itself.
    :param bool isolated: a flag to indicated if build is isolated
    """
    # Make sure the version label will be presented in labels
    if not labels:
        labels = ['version="1.0"']
    elif not any([label.startswith('version') for label in labels]):
        labels.append('version="1.0"')

    if not flatpak:
        # flatpak build has no Dockefile
        dockerfile_f(source_dir, labels)

    env = MockEnv(workflow).for_plugin(CheckUserSettingsPlugin.key,
                                       {'flatpak': flatpak})
    env.workflow.source = FakeSource(source_dir)
    env.workflow.build_dir.init_build_dirs(["aarch64", "x86_64"],
                                           env.workflow.source)

    if isolated is not None:
        env.set_isolated(isolated)

    # flatpak builds have no parent images to record
    dfp = env.workflow.build_dir.any_platform.dockerfile
    env.workflow.data.dockerfile_images = DockerfileImages(
        [] if flatpak else dfp.parent_images)

    flexmock(env.workflow.imageutil).should_receive(
        "base_image_inspect").and_return({})

    return env.create_runner()
# Example 29
def mock_workflow(workflow,
                  build_dir: Path,
                  dockerfile=DEFAULT_DOCKERFILE,
                  platforms=None,
                  scratch=False):
    """Prepare a workflow with a written Dockerfile and initialized build dirs."""
    workflow.user_params['scratch'] = scratch
    workflow.source = MockSource(build_dir)
    platforms = platforms or ['x86_64']
    workflow.data.plugins_results[PLUGIN_CHECK_AND_SET_PLATFORMS_KEY] = set(
        platforms)
    with open(workflow.source.dockerfile_path, 'w') as dockerfile_file:
        dockerfile_file.write(dockerfile)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    parser = DockerfileParser(str(build_dir))
    workflow.data.dockerfile_images = DockerfileImages(parser.parent_images)
    mock_get_retry_session()
# Example 30
    def __init__(self):
        """Stub builder with empty defaults for the attributes plugins touch."""
        self.dockerfile_images = DockerfileImages([])
        self.parent_images_digests = {}
        self.df_path = None
        self.df_dir = None
        self.git_dockerfile_path = None
        self.git_path = None
        self.image = None
        self.image_id = None
        self.source = StubSource()
        self.source.config = StubConfig()
        self.tasker = None
        self.original_df = None
        self.buildargs = {}

        # lazily-populated inspection caches
        self._inspection_data = None
        self._parent_inspection_data = {}