Example #1
    def run_plugin_with_args(self, workflow, plugin_args):
        runner = PrePublishPluginsRunner(
            workflow, [{
                'name': PrePublishSquashPlugin.key,
                'args': plugin_args
            }])

        result = runner.run()
        assert result[PrePublishSquashPlugin.key] is None

        if self.output_path:
            assert workflow.data.exported_image_sequence == [{
                'md5sum': DUMMY_TARBALL['md5sum'],
                'sha256sum': DUMMY_TARBALL['sha256sum'],
                'size': DUMMY_TARBALL['size'],
                'type': IMAGE_TYPE_DOCKER_ARCHIVE,
                'path': self.output_path,
            }]
        else:
            assert workflow.data.exported_image_sequence == []
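All of the examples on this page share one pattern, regardless of which constructor signature their atomic-reactor version uses (newer code passes only the workflow, older code passes a tasker first): the runner is built from a list of {'name': ..., 'args': ...} plugin configurations, and run() returns a dict keyed by plugin key. Below is a minimal sketch of that pattern, assuming the newer workflow-only signature; the import path and the helper name are assumptions, not taken from the examples.

# Import path assumed from atomic-reactor; adjust to the version in use.
from atomic_reactor.plugin import PrePublishPluginsRunner


def run_prepublish_plugin(workflow, plugin_key, plugin_args=None):
    """Run a single pre-publish plugin and return its entry from the results."""
    runner = PrePublishPluginsRunner(
        workflow,
        [{'name': plugin_key, 'args': plugin_args or {}}],
    )
    # run() raises PluginFailedException if the plugin fails; on success it
    # returns a dict mapping each plugin key to that plugin's return value.
    results = runner.run()
    return results[plugin_key]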
Example #2
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self, self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self, self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if not build_result.is_failed():
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self, self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            self.source.remove_tmpdir()

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
Example #3
def test_skip_plugin(caplog, docker_tasker, user_params):
    workflow = DockerBuildWorkflow(source=MOCK_SOURCE)
    workflow.user_params = {}
    setattr(workflow, 'builder', MockBuilder())

    runner = PrePublishPluginsRunner(docker_tasker, workflow,
                                     [{
                                         'name': FlatpakCreateOciPlugin.key,
                                         'args': {}
                                     }])

    runner.run()

    assert 'not flatpak build, skipping plugin' in caplog.text
Example #4
    def run_plugin_with_args(self, plugin_args):
        runner = PrePublishPluginsRunner(
            self.tasker,
            self.workflow,
            [{'name': PrePublishSquashPlugin.key, 'args': plugin_args}]
        )

        result = runner.run()
        assert result[PrePublishSquashPlugin.key] is None

        assert self.workflow.exported_image_sequence == [{
            'md5sum': DUMMY_TARBALL['md5sum'],
            'sha256sum': DUMMY_TARBALL['sha256sum'],
            'size': DUMMY_TARBALL['size'],
            'path': self.output_path,
        }]
Example #5
    def run_plugin_with_args(self, plugin_args):
        runner = PrePublishPluginsRunner(
            self.tasker,
            self.workflow,
            [{'name': PrePublishSquashPlugin.key, 'args': plugin_args}]
        )

        result = runner.run()
        assert result[PrePublishSquashPlugin.key] is None

        if self.output_path:
            assert self.workflow.exported_image_sequence == [{
                'md5sum': DUMMY_TARBALL['md5sum'],
                'sha256sum': DUMMY_TARBALL['sha256sum'],
                'size': DUMMY_TARBALL['size'],
                'type': IMAGE_TYPE_DOCKER_ARCHIVE,
                'path': self.output_path,
            }]
        else:
            assert self.workflow.exported_image_sequence == []
Example #6
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker,
                self,
                self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if not build_result.is_failed():
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            self.source.remove_tmpdir()

            exit_runner = ExitPluginsRunner(self.builder.tasker,
                                            self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
Example #7
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage, mock_flatpak):
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)), int(m.group(3))) >= (0, 9, 7):
                have_flatpak = True

        except (subprocess.CalledProcessError, OSError):
            pytest.skip(msg='flatpak not available')

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess)
         .should_receive("check_call")
         .replace_with(mocked_check_call))

        (flexmock(subprocess)
         .should_receive("check_output")
         .replace_with(mocked_check_output))

    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'wb') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']
        mmd = Modulemd.Module.new_from_string(module_config['metadata'])

        # Clear out all dependencies. Setting via the property causes a crash
        # https://gitlab.gnome.org/GNOME/pygobject/issues/37
        # mmd.props.dependencies = [Modulemd.Dependencies()]
        mmd.set_dependencies([Modulemd.Dependencies()])

        module_config['metadata'] = mmd.dumps()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.d)
     .should_receive('export')
     .with_args(CONTAINER_ID)
     .and_return(export_generator))

    (flexmock(docker_tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image, command=["/bin/bash"])
     .and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak

        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
                   for l in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
                   for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file('/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
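The stream_to_generator() helper defined near the top of Example #7 is small enough to illustrate on its own: it wraps a file-like object in a generator of fixed-size chunks, which is the shape of data the mocked export() hands to the plugin. A self-contained sketch follows; the io.BytesIO source and the chunk lengths are arbitrary choices for the demonstration.

import io


def stream_to_generator(s, chunk_size=100):
    # Yield the stream in small chunks until it is exhausted.
    while True:
        buf = s.read(chunk_size)
        if not buf:
            return
        yield buf


stream = io.BytesIO(b'x' * 250)
chunks = list(stream_to_generator(stream))
assert [len(c) for c in chunks] == [100, 100, 50]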
Example #8
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage, mock_flatpak):
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)), int(m.group(3))) >= (0, 9, 7):
                have_flatpak = True

        except (subprocess.CalledProcessError, OSError):
            pass

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess)
         .should_receive("check_call")
         .replace_with(mocked_check_call))

        (flexmock(subprocess)
         .should_receive("check_output")
         .replace_with(mocked_check_output))

    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'w') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'stray_component':
        fullpath = os.path.join(filesystem_dir, 'var/tmp/flatpak-build.rpm_qf')
        with open(fullpath, 'a') as f:
            f.write("bad-rpm;1.2.3;1.fc26;x86_64;0;42;sigmd5;0;42;1491914281;sigpgp;siggpg\n")
        expected_exception = 'bad-rpm'
    elif breakage == 'missing_component':
        fullpath = os.path.join(filesystem_dir, 'var/tmp/flatpak-build.rpm_qf')
        with open(fullpath, 'r') as f:
            with open(fullpath + '.tmp', 'w') as g:
                f.readline()
                g.write(f.read())
        os.rename(fullpath + '.tmp', fullpath)
        expected_exception = 'Installed set of packages does not match runtime profile'
    elif breakage == 'no_runtime':
        mmd = ModuleMetadata()

        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']
        mmd.loads(module_config['metadata'])
        del mmd.buildrequires['flatpak-runtime']
        module_config['metadata'] = mmd.dumps()
        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    (flexmock(docker_tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image)
     .and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped)
     .should_receive('export')
     .with_args(CONTAINER_ID)
     .and_return(export_stream))
    (flexmock(docker_tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID))

    modules = {}
    for name, module_config in config['modules'].items():
        mmd = ModuleMetadata()
        mmd.loads(module_config['metadata'])
        modules[name] = ModuleInfo(name,
                                   module_config['stream'],
                                   module_config['version'],
                                   mmd,
                                   module_config['rpms'])
    base_module = modules[config['base_module']]

    repo_url = 'http://odcs.example/composes/latest-odcs-42-1/compose/Temporary/$basearch/os/'
    compose_info = ComposeInfo(base_module.name + '-' + base_module.stream,
                               42, base_module,
                               modules,
                               repo_url)
    set_compose_info(workflow, compose_info)

    source = FlatpakSourceInfo(FLATPAK_APP_JSON,
                               compose_info)
    set_flatpak_source_info(workflow, source)

    runner = PrePublishPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak

        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file('/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines
        else:  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'
Example #9
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
Example #10
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            self.fs_watcher.start()
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
Example #11
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            start_time = datetime.datetime.now()
            self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

            build_result = self.builder.build()

            try:
                finish_time = datetime.datetime.now()
                duration = finish_time - start_time
                seconds = duration.total_seconds()
                logger.debug("build finished in %ds", seconds)
                self.plugins_durations['dockerbuild'] = seconds
            except Exception:
                logger.exception("failed to save build duration")

            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                self.plugins_errors['dockerbuild'] = ''
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker,
                self,
                self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker,
                                            self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
Example #12
    def build_docker_image(self) -> None:
        """
        build docker image
        """
        print_version_of_tools()

        exception_being_handled = False
        # Make sure exit_runner is defined for finally block
        exit_runner = None
        try:
            self.fs_watcher.start()
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            prebuild_runner = PreBuildPluginsRunner(
                self, self.plugins.prebuild, plugin_files=self.plugin_files)
            prepublish_runner = PrePublishPluginsRunner(
                self, self.plugins.prepublish, plugin_files=self.plugin_files)
            postbuild_runner = PostBuildPluginsRunner(
                self, self.plugins.postbuild, plugin_files=self.plugin_files)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise

            # we are delaying initialization, because prebuild plugin reactor_config
            # might change build method
            buildstep_runner = BuildStepPluginsRunner(
                self, self.plugins.buildstep, plugin_files=self.plugin_files)

            logger.info("running buildstep plugins")
            try:
                buildstep_runner.run()
            except PluginFailedException as ex:
                logger.error('buildstep plugin failed: %s', ex)
                raise

            # run prepublish plugins
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise
        except Exception as ex:
            logger.debug("caught exception (%s) so running exit plugins",
                         exception_message(ex))
            exception_being_handled = True
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self,
                                            self.plugins.exit,
                                            keep_going=True,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)

                # raise exception only in case that there is no previous exception being already
                # handled to prevent replacing original exceptions (root cause) with exceptions
                # from exit plugins
                if not exception_being_handled:
                    raise ex
            finally:
                self.fs_watcher.finish()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
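The exception_being_handled flag in Example #12 (and again in Example #16 below) exists so that a failing exit plugin cannot mask the original build error. Stripped of the atomic-reactor specifics, the guard reduces to the sketch below; the function names are generic placeholders, not part of the original code.

def run_with_cleanup(build_step, exit_step):
    exception_being_handled = False
    try:
        return build_step()
    except Exception:
        exception_being_handled = True
        raise
    finally:
        try:
            exit_step()
        except Exception:
            # Swallow the cleanup failure when an earlier exception is already
            # propagating, so the root cause is never replaced by it.
            if not exception_being_handled:
                raise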
Example #13
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name,
                            flatpak_metadata, breakage):
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)), int(
                m.group(3))) >= (0, 9, 7):
            have_flatpak = True

    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='flatpak not available')

    if not have_flatpak:
        return

    config = CONFIGS[config_name]

    workflow = DockerBuildWorkflow(TEST_IMAGE,
                                   source={
                                       "provider": "git",
                                       "uri": "asd"
                                   })
    setattr(workflow, 'builder', X)
    X.df_path = write_docker_file(config, str(tmpdir))
    setattr(workflow.builder, 'tasker', docker_tasker)

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'wb') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']

        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'],
                                                strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.tasker)
     .should_receive('export_container')
     .with_args(CONTAINER_ID)
     .and_return(export_generator))

    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image, command=["/bin/bash"])
     .and_return({'Id': CONTAINER_ID}))

    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID, force=False))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(docker_tasker, workflow,
                                     [{
                                         'name': FlatpakCreateOciPlugin.key,
                                         'args': {}
                                     }])

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the correct labels and annotations were written

        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels[
                'com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present; since
        # we don't require such a new Flatpak, skip the remaining checks in the
        # label-only case
        if flatpak_metadata == 'labels':
            return

        inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}  # noqa:E501; pylint: disable=not-an-iterable
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(
            re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
            for l in metadata_lines)
        assert any(
            re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
            for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
Example #14
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker,
                self,
                self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            exit_runner = ExitPluginsRunner(self.builder.tasker,
                                            self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
            finally:
                self.source.remove_tmpdir()
Example #15
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage,
                            mock_flatpak):
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)), int(
                    m.group(3))) >= (0, 9, 7):
                have_flatpak = True

        except (subprocess.CalledProcessError, OSError):
            pass

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess).should_receive("check_call").replace_with(
            mocked_check_call))

        (flexmock(subprocess).should_receive("check_output").replace_with(
            mocked_check_output))

    workflow = DockerBuildWorkflow({
        "provider": "git",
        "uri": "asd"
    }, TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'w') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']
        mmd = Modulemd.Module.new_from_string(module_config['metadata'])

        # Clear out all dependencies. Setting via the property causes a crash
        # https://gitlab.gnome.org/GNOME/pygobject/issues/37
        #        mmd.props.dependencies = [Modulemd.Dependencies()]
        mmd.set_dependencies([Modulemd.Dependencies()])

        module_config['metadata'] = mmd.dumps()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(
        docker_tasker.d.wrapped).should_receive('create_container').with_args(
            workflow.image,
            command=["/bin/bash"]).and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped).should_receive('export').with_args(
        CONTAINER_ID).and_return(export_generator))
    (flexmock(docker_tasker.d.wrapped).should_receive(
        'remove_container').with_args(CONTAINER_ID))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(docker_tasker, workflow,
                                     [{
                                         'name': FlatpakCreateOciPlugin.key,
                                         'args': {}
                                     }])

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak

        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(
            re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
            for l in metadata_lines)
        assert any(
            re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
            for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
Example #16
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        exception_being_handled = False
        self.builder = InsideBuilder(self.source, self.image)
        # Make sure exit_runner is defined for finally block
        exit_runner = None
        try:
            self.fs_watcher.start()
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker,
                self,
                self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            # we are delaying initialization, because prebuild plugin reactor_config
            # might change build method
            buildstep_runner = BuildStepPluginsRunner(
                self.builder.tasker,
                self,
                self.buildstep_plugins_conf,
                plugin_files=self.plugin_files)

            logger.info("running buildstep plugins")
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.get_image_history(
                    self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][
                    INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{
                    "diff_id": diff_id,
                    "size": layer['Size']
                } for (diff_id, layer) in zip(diff_ids, reversed(history))]

            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%s) so running exit plugins",
                         exception_message(ex))
            exception_being_handled = True
            raise
        finally:
            # We need to make sure all exit plugins are executed
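            # so SIGTERM is ignored while they run; the default handler is
            # restored once the exit plugins have finished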
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self.builder.tasker,
                                            self,
                                            self.exit_plugins_conf,
                                            keep_going=True,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)

                # raise exception only in case that there is no previous exception being already
                # handled to prevent replacing original exceptions (root cause) with exceptions
                # from exit plugins
                if not exception_being_handled:
                    raise ex
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage,
                            mock_flatpak):
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)), int(
                    m.group(3))) >= (0, 9, 7):
                have_flatpak = True

        except (subprocess.CalledProcessError, OSError):
            pass

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess).should_receive("check_call").replace_with(
            mocked_check_call))

        (flexmock(subprocess).should_receive("check_output").replace_with(
            mocked_check_output))

    workflow = DockerBuildWorkflow({
        "provider": "git",
        "uri": "asd"
    }, TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None
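        # an illustrative key such as '/files/bin/foo:0755' yields the path
        # '/files/bin/foo' and the octal mode string '0755'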

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'w') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'stray_component':
        fullpath = os.path.join(filesystem_dir, 'var/tmp/flatpak-build.rpm_qf')
        with open(fullpath, 'a') as f:
            f.write(
                "bad-rpm;1.2.3;1.fc26;x86_64;0;42;sigmd5;0;42;1491914281;sigpgp;siggpg\n"
            )
        expected_exception = 'bad-rpm'
    elif breakage == 'missing_component':
        fullpath = os.path.join(filesystem_dir, 'var/tmp/flatpak-build.rpm_qf')
        with open(fullpath, 'r') as f:
            with open(fullpath + '.tmp', 'w') as g:
                f.readline()
                g.write(f.read())
        os.rename(fullpath + '.tmp', fullpath)
        expected_exception = 'Installed set of packages does not match runtime profile'
    elif breakage == 'no_runtime':
        mmd = ModuleMetadata()

        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']
        mmd.loads(module_config['metadata'])
        del mmd.buildrequires['flatpak-runtime']
        module_config['metadata'] = mmd.dumps()
        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for name in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, name), name)

    export_stream = open(filesystem_tar, "rb")
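    # the mocked docker export below returns this tarball as the container
    # filesystem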

    (flexmock(
        docker_tasker.d.wrapped).should_receive('create_container').with_args(
            workflow.image).and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped).should_receive('export').with_args(
        CONTAINER_ID).and_return(export_stream))
    (flexmock(docker_tasker.d.wrapped).should_receive(
        'remove_container').with_args(CONTAINER_ID))

    modules = {}
    for name, module_config in config['modules'].items():
        mmd = ModuleMetadata()
        mmd.loads(module_config['metadata'])
        modules[name] = ModuleInfo(name, module_config['stream'],
                                   module_config['version'], mmd,
                                   module_config['rpms'])
    base_module = modules[config['base_module']]

    repo_url = 'http://odcs.example/composes/latest-odcs-42-1/compose/Temporary/$basearch/os/'
    compose_info = ComposeInfo(base_module.name + '-' + base_module.stream, 42,
                               base_module, modules, repo_url)
    set_compose_info(workflow, compose_info)

    source = FlatpakSourceInfo(FLATPAK_APP_JSON, compose_info)
    set_flatpak_source_info(workflow, source)

    runner = PrePublishPluginsRunner(docker_tasker, workflow,
                                     [{
                                         'name': FlatpakCreateOciPlugin.key,
                                         'args': {}
                                     }])

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex)
    else:
        runner.run()

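        # the plugin appends the OCI directory first and the OCI tarball last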
        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak

        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines
        else:  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'
Example #18
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(
                self.builder.tasker,
                self,
                self.buildstep_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker,
                self,
                self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker,
                self,
                self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker,
                                            self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
Example #19
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self, self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self, self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self, self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
            finally:
                self.source.remove_tmpdir()
Example #20
    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self, self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            start_time = datetime.datetime.now()
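            # the start time is recorded so the duration of the docker build can
            # be computed and stored below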
            self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

            build_result = self.builder.build()

            try:
                finish_time = datetime.datetime.now()
                duration = finish_time - start_time
                seconds = duration.total_seconds()
                logger.debug("build finished in %ds", seconds)
                self.plugins_durations['dockerbuild'] = seconds
            except Exception:
                logger.exception("failed to save build duration")

            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
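                # the phase is recorded as failed; there is no exception to
                # attach, so the message is left empty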
                self.plugins_errors['dockerbuild'] = ''
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self, self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self, self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()