Example #1
    def _load_source(self):
        flatpak_yaml = self.workflow.source.config.flatpak

        compose_info = get_compose_info(self.workflow)
        if compose_info is None:
            raise RuntimeError(
                "resolve_module_compose must be run before flatpak_create_dockerfile"
            )

        return FlatpakSourceInfo(flatpak_yaml, compose_info)
    def _load_source(self):
        flatpak_path = os.path.join(self.workflow.builder.df_dir, FLATPAK_FILENAME)
        with open(flatpak_path, 'r') as fp:
            flatpak_json = json.load(fp)

        compose_info = get_compose_info(self.workflow)
        if compose_info is None:
            raise RuntimeError(
                "resolve_module_compose must be run before flatpak_create_dockerfile")

        return FlatpakSourceInfo(flatpak_json, compose_info)
    def _load_source(self):
        flatpak_path = os.path.join(self.workflow.builder.df_dir,
                                    FLATPAK_FILENAME)
        with open(flatpak_path, 'r') as fp:
            flatpak_json = json.load(fp)

        compose_info = get_compose_info(self.workflow)
        if compose_info is None:
            raise RuntimeError(
                "resolve_module_compose must be run before flatpak_create_dockerfile"
            )

        return FlatpakSourceInfo(flatpak_json, compose_info)
    def _load_source(self):
        flatpak_yaml = self.workflow.source.config.flatpak

        compose_info = get_compose_info(self.workflow)
        if compose_info is None:
            raise RuntimeError(
                "resolve_module_compose must be run before flatpak_create_dockerfile"
            )

        module_spec = ModuleSpec.from_str(compose_info.source_spec)

        return FlatpakSourceInfo(flatpak_yaml, compose_info.modules,
                                 compose_info.base_module, module_spec.profile)
    def _load_source(self):
        flatpak_yaml = self.workflow.source.config.flatpak

        compose_info = get_compose_info(self.workflow)
        if compose_info is None:
            raise RuntimeError(
                "resolve_module_compose must be run before flatpak_create_dockerfile")

        module_spec = ModuleSpec.from_str(compose_info.source_spec)

        return FlatpakSourceInfo(flatpak_yaml,
                                 compose_info.modules,
                                 compose_info.base_module,
                                 module_spec.profile)
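
Every `_load_source` variant above fails fast when `get_compose_info` returns nothing, which encodes an ordering contract between two plugins. A minimal sketch of a prebuild plugin list that satisfies it, assuming the `docker_tasker` and `workflow` fixtures used in the tests below and the usual atomic_reactor import (plugin args are left empty for brevity):

from atomic_reactor.plugin import PreBuildPluginsRunner

# resolve_module_compose stores the compose info that
# flatpak_create_dockerfile later reads back via get_compose_info().
runner = PreBuildPluginsRunner(docker_tasker, workflow, [
    {'name': 'resolve_module_compose', 'args': {}},
    {'name': 'flatpak_create_dockerfile', 'args': {}},
])
runner.run()
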
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)

        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {
            'image': {'autorebuild': is_rebuild(self.workflow)},
            'submitter': self.koji_session.getLoggedInUser().get('name'),
        }

        koji_task_owner = None
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = koji_task_id = int(koji_task_id)
                koji_task_owner = get_koji_task_owner(self.koji_session, koji_task_id).get('name')
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(set(pulp_pull_results)))

        # append parent builds and parent_build_id from koji parent
        extra['image'].update(get_parent_image_koji_data(self.workflow))

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
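
For orientation: the dict assembled here forms the 'build' section of the Koji content-generator metadata. An illustrative instance of its shape (every value below is invented):

build = {
    'name': 'eog',
    'version': '3.28.3',
    'release': '1.module_2123+73a9ef6f',
    'source': 'git://pkgs.example.com/containers/eog#abc123',
    'start_time': 1530000000,
    'end_time': 1530000600,
    'extra': {
        'image': {'autorebuild': False, 'isolated': False},
        'submitter': 'osbs',
        'container_koji_task_id': 12345,
    },
    'owner': 'jdoe',
}
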
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        builder = FlatpakBuilder(source, None, None)

        builder.precheck()

        # Create the dockerfile

        module_info = source.base_module

        # We need to enable all the modules other than the platform pseudo-module
        modules_str = ' '.join(builder.get_enable_modules())

        install_packages_str = ' '.join(builder.get_install_packages())

        name = source.flatpak_yaml.get('name', module_info.name)
        component = source.flatpak_yaml.get('component', module_info.name)

        df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(DOCKERFILE_TEMPLATE.format(name=name,
                                                component=component,
                                                stream=module_info.stream.replace('-', '_'),
                                                version=module_info.version,
                                                base_image=self.base_image,
                                                modules=modules_str,
                                                packages=install_packages_str,
                                                rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        includepkgs = builder.get_includepkgs()
        includepkgs_path = os.path.join(self.workflow.builder.df_dir, 'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            f.write(builder.get_cleanup_script())
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        compose_info = get_compose_info(self.workflow)

        repo = {
            'name': repo_name,
            'baseurl': compose_info.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)
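
The last step registers a .repo file to be injected into the build. A sketch of what render_yum_repo plausibly emits for that dict, assuming a standard INI-style .repo layout (the repo id and URL are invented; with escape_dollars=False a literal $basearch would survive in the baseurl):

repo = {
    'name': 'atomic-reactor-module-eog-f28-20170629213428',
    'baseurl': 'http://odcs.example.com/composes/84/compose/Temporary/$basearch/os',
    'enabled': 1,
    'gpgcheck': 0,
}
print('[{}]'.format(repo['name']))
for key, value in repo.items():
    print('{}={}'.format(key, value))
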
Example #8
def test_resolve_module_compose(tmpdir, docker_tasker, compose_ids, modules,
                                signing_intent, signing_intent_source,
                                sigkeys):
    secrets_path = os.path.join(str(tmpdir), "secret")
    os.mkdir(secrets_path)
    with open(os.path.join(secrets_path, "token"), "w") as f:
        f.write("green_eggs_and_ham")

    if modules is not None:
        data = "compose:\n"
        data += "    modules:\n"
        for mod in modules:
            data += "    - {}\n".format(mod)
        if signing_intent_source == 'container_yaml':
            data += '    signing_intent: ' + signing_intent
        tmpdir.join(REPO_CONTAINER_CONFIG).write(data)

    module = None
    if modules:
        module = modules[0]

    workflow = mock_workflow(tmpdir)
    mock_get_retry_session()
    mock_koji_session()

    def handle_composes_post(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if isinstance(request.body, six.text_type):
            body = request.body
        else:
            body = request.body.decode()
        body_json = json.loads(body)
        assert body_json['source']['type'] == 'module'
        assert body_json['source']['source'] == module
        assert body_json['source']['sigkeys'] == sigkeys
        assert body_json['arches'] == ['ppc64le', 'x86_64']
        return (200, {}, compose_json(0, 'wait'))

    responses.add_callback(responses.POST,
                           ODCS_URL + '/composes/',
                           content_type='application/json',
                           callback=handle_composes_post)

    state = {'count': 1}

    def handle_composes_get(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if state['count'] == 1:
            response_json = compose_json(1, 'generating')
        else:
            response_json = compose_json(2, 'done')
        state['count'] += 1

        return (200, {}, response_json)

    responses.add_callback(responses.GET,
                           ODCS_URL + '/composes/84',
                           content_type='application/json',
                           callback=handle_composes_get)

    args = {
        'odcs_url': ODCS_URL,
        'odcs_openidc_secret_path': secrets_path,
        'compose_ids': compose_ids
    }

    if signing_intent_source == 'command_line':
        args['signing_intent'] = signing_intent

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1,
                       'odcs': {'api_url': ODCS_URL,
                                'auth': {'openidc_dir': secrets_path},
                                'signing_intents': [
                                    {
                                        'name': 'unsigned',
                                        'keys': [],
                                    },
                                    {
                                        'name': 'release',
                                        'keys': ['R123', 'R234'],
                                    },
                                    {
                                        'name': 'beta',
                                        'keys': ['R123', 'B456', 'B457'],
                                    },
                                ],
                                'default_signing_intent': 'unsigned'},
                       'koji':  {'auth': {},
                                 'hub_url': 'https://koji.example.com/hub'}})

    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{
                                       'name': ResolveModuleComposePlugin.key,
                                       'args': args
                                   }])

    if modules is None:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config in container.yaml is required ' in str(
            exc_info.value)
    elif not modules:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config has no modules' in str(exc_info.value)
    else:
        runner.run()

        compose_info = get_compose_info(workflow)

        assert compose_info.compose_id == 84
        assert compose_info.base_module.name == MODULE_NAME
        assert compose_info.base_module.stream == MODULE_STREAM
        assert compose_info.base_module.version == MODULE_VERSION
        assert compose_info.base_module.mmd.props.summary == 'Eye of GNOME Application Module'
        assert compose_info.base_module.rpms == [
            'eog-0:3.28.3-1.module_2123+73a9ef6f.src.rpm',
            'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64.rpm',
            'eog-0:3.28.3-1.module_2123+73a9ef6f.ppc64le.rpm',
        ]
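
The helpers compose_json, mock_workflow, mock_get_retry_session, and mock_koji_session live elsewhere in the test module. A plausible stand-in for compose_json, assuming it mimics the ODCS compose payload the assertions depend on (compose id 84, with state_name driving the wait loop; field names follow the public ODCS API, values are invented):

import json

def compose_json(state, state_name, compose_id=84):
    return json.dumps({
        'id': compose_id,
        'result_repofile': 'http://odcs.example.com/composes/{}/compose.repo'.format(compose_id),
        'source': 'eog:f28:20170629213428',
        'source_type': 2,  # module
        'state': state,
        'state_name': state_name,
    })
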
Example #9
def test_resolve_module_compose(
        tmpdir,
        docker_tasker,
        compose_ids,
        modules,  # noqa
        reactor_config_map):
    secrets_path = os.path.join(str(tmpdir), "secret")
    os.mkdir(secrets_path)
    with open(os.path.join(secrets_path, "token"), "w") as f:
        f.write("green_eggs_and_ham")

    if modules is not None:
        data = "compose:\n"
        data += "    modules:\n"
        for mod in modules:
            data += "    - {}\n".format(mod)
        tmpdir.join(REPO_CONTAINER_CONFIG).write(data)

    module = None
    if modules:
        module = modules[0]

    workflow = mock_workflow(tmpdir)
    mock_get_retry_session()

    def handle_composes_post(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if isinstance(request.body, six.text_type):
            body = request.body
        else:
            body = request.body.decode()
        body_json = json.loads(body)
        assert body_json['source']['type'] == 'module'
        assert body_json['source']['source'] == module
        return (200, {}, compose_json(0, 'wait'))

    responses.add_callback(responses.POST,
                           ODCS_URL + '/composes/',
                           content_type='application/json',
                           callback=handle_composes_post)

    state = {'count': 1}

    def handle_composes_get(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if state['count'] == 1:
            response_json = compose_json(1, 'generating')
        else:
            response_json = compose_json(2, 'done')
        state['count'] += 1

        return (200, {}, response_json)

    responses.add_callback(responses.GET,
                           ODCS_URL + '/composes/84',
                           content_type='application/json',
                           callback=handle_composes_get)

    def handle_unreleasedvariants(request):
        query = parse_qs(urlparse(request.url).query)

        assert query['variant_id'] == [MODULE_NAME]
        assert query['variant_version'] == [MODULE_STREAM]
        assert query['variant_release'] == [MODULE_VERSION]

        return (200, {}, json.dumps(LATEST_VERSION_JSON))

    responses.add_callback(responses.GET,
                           PDC_URL + '/unreleasedvariants/',
                           content_type='application/json',
                           callback=handle_unreleasedvariants)

    args = {
        'odcs_url': ODCS_URL,
        'odcs_openidc_secret_path': secrets_path,
        'pdc_url': PDC_URL,
        'compose_ids': compose_ids
    }

    if reactor_config_map:
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
        workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
            ReactorConfig({'version': 1,
                           'odcs': {'api_url': ODCS_URL,
                                    'auth': {'openidc_dir': secrets_path}},
                           'pdc': {'api_url': PDC_URL}})

    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{
                                       'name': ResolveModuleComposePlugin.key,
                                       'args': args
                                   }])

    if modules is None:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config in container.yaml is required ' in str(
            exc_info.value)
    elif not modules:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config has no modules' in str(exc_info.value)
    else:
        runner.run()

        compose_info = get_compose_info(workflow)

        assert compose_info.compose_id == 84
        assert compose_info.base_module.name == MODULE_NAME
        assert compose_info.base_module.stream == MODULE_STREAM
        assert compose_info.base_module.version == MODULE_VERSION
        assert compose_info.base_module.mmd.props.summary == 'Eye of GNOME Application Module'
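
All the mocked endpoints in these tests follow the same responses.add_callback pattern. A self-contained miniature of that pattern, runnable outside the suite (URL and payload are illustrative):

import json

import requests
import responses

@responses.activate
def demo():
    def handler(request):
        # Callback returns (status, headers, body), as in the tests above.
        return (200, {}, json.dumps({'id': 84, 'state_name': 'done'}))

    responses.add_callback(responses.GET,
                           'http://odcs.example.com/composes/84',
                           content_type='application/json',
                           callback=handler)
    assert requests.get('http://odcs.example.com/composes/84').json()['state_name'] == 'done'

demo()
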
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(
            df_parser(self.workflow.builder.df_path,
                      workflow=self.workflow).labels)

        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {
            'image': {
                'autorebuild': is_rebuild(self.workflow)
            },
            'submitter': self.koji_session.getLoggedInUser().get('name'),
        }

        koji_task_owner = None
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = koji_task_id = int(
                    koji_task_id)
                koji_task_owner = get_koji_task_owner(self.koji_session,
                                                      koji_task_id).get('name')
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error(
                    "%s: expected filesystem-koji-task-id in result",
                    AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r",
                                   fs_task_id,
                                   exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(
            PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(
                set(pulp_pull_results)))

        # append parent builds and parent_build_id from koji parent
        extra['image'].update(get_parent_image_koji_data(self.workflow))

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(
                help_result, dict
        ) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s",
                               help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        resolve_comp_result = self.workflow.prebuild_results.get(
            PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids':
                [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent':
                resolve_comp_result['signing_intent'],
                'signing_intent_overridden':
                resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
Example #11
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)
        extra = {'image': {}}

        if not self.source_build:
            labels = Labels(df_parser(self.workflow.builder.df_path,
                                      workflow=self.workflow).labels)
            _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
            _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

            source = self.workflow.source
            if not isinstance(source, GitSource):
                raise RuntimeError('git source required')

            extra['image']['autorebuild'] = is_rebuild(self.workflow)
            if self.workflow.triggered_after_koji_task:
                extra['image']['triggered_after_koji_task'] =\
                    self.workflow.triggered_after_koji_task

            try:
                isolated = str(metadata['labels']['isolated']).lower() == 'true'
            except (IndexError, AttributeError, KeyError):
                isolated = False
            self.log.info("build is isolated: %r", isolated)
            extra['image']['isolated'] = isolated

            fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
            if fs_result is not None:
                try:
                    fs_task_id = fs_result['filesystem-koji-task-id']
                except KeyError:
                    self.log.error("%s: expected filesystem-koji-task-id in result",
                                   AddFilesystemPlugin.key)
                else:
                    try:
                        task_id = int(fs_task_id)
                    except ValueError:
                        self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                    else:
                        extra['filesystem_koji_task_id'] = task_id

            extra['image'].update(get_parent_image_koji_data(self.workflow))

            flatpak_source_info = get_flatpak_source_info(self.workflow)
            if flatpak_source_info is not None:
                compose_info = get_compose_info(self.workflow)
                koji_metadata = compose_info.koji_metadata()
                koji_metadata['flatpak'] = True
                extra['image'].update(koji_metadata)

            resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
            if resolve_comp_result:
                extra['image']['odcs'] = {
                    'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                    'signing_intent': resolve_comp_result['signing_intent'],
                    'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
                }
            if self.workflow.all_yum_repourls:
                extra['image']['yum_repourls'] = self.workflow.all_yum_repourls

            self.set_help(extra, worker_metadatas)
            self.set_operators_metadata(extra, worker_metadatas)
            self.set_go_metadata(extra)
            self.set_group_manifest_info(extra, worker_metadatas)
        else:
            source_result = self.workflow.prebuild_results[PLUGIN_FETCH_SOURCES_KEY]
            extra['image']['sources_for_nvr'] = source_result['sources_for_nvr']
            extra['image']['sources_signing_intent'] = source_result['signing_intent']

        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')
        extra['submitter'] = self.session.getLoggedInUser()['name']

        self.set_media_types(extra, worker_metadatas)

        build = {
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }
        if self.source_build:
            build.update({
                'name': self.workflow.koji_source_nvr['name'],
                'version': self.workflow.koji_source_nvr['version'],
                'release': self.workflow.koji_source_nvr['release'],
                'source': self.workflow.koji_source_source_url,
            })
        else:
            build.update({
                'name': component,
                'version': version,
                'release': release,
                'source': "{0}#{1}".format(source.uri, source.commit_id),
            })

        return build
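
The isolated-label handling in this and the other get_build variants tolerates absent or malformed metadata. Extracted into a standalone helper, the logic behaves like this:

def is_isolated(metadata):
    # Mirrors the try/except above: any missing piece means "not isolated".
    try:
        return str(metadata['labels']['isolated']).lower() == 'true'
    except (IndexError, AttributeError, KeyError):
        return False

assert is_isolated({'labels': {'isolated': 'True'}})
assert not is_isolated({'labels': {}})
assert not is_isolated({})
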
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        builder = FlatpakBuilder(source, None, None)

        builder.precheck()

        # Create the dockerfile

        module_info = source.base_module

        # We need to enable all the modules other than the platform pseudo-module
        modules_str = ' '.join(builder.get_enable_modules())

        install_packages_str = ' '.join(builder.get_install_packages())

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           modules=modules_str,
                                           packages=install_packages_str,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        includepkgs = builder.get_includepkgs()
        includepkgs_path = os.path.join(self.workflow.builder.df_dir,
                                        'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            f.write(builder.get_cleanup_script())
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        compose_info = get_compose_info(self.workflow)

        repo = {
            'name': repo_name,
            'baseurl': compose_info.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        override_build_kwarg(self.workflow, 'module_compose_id',
                             compose_info.compose_id)
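
The real DOCKERFILE_TEMPLATE is defined in the plugin module and is not part of this snippet. A toy stand-in shows how the fields gathered above get substituted (template text and values are invented; the rpm_qf_args field is omitted for brevity):

TOY_TEMPLATE = '''FROM {base_image}
RUN dnf module enable -y {modules} && dnf install -y {packages}
LABEL name="{name}" version="{stream}" release="{version}"
'''

print(TOY_TEMPLATE.format(name='eog',
                          stream='f28',
                          version='20170629213428',
                          base_image='registry.fedoraproject.org/fedora:latest',
                          modules='eog:f28',
                          packages='eog'))
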
Example #13
def test_resolve_module_compose(tmpdir, docker_tasker, specify_version):
    secrets_path = os.path.join(str(tmpdir), "secret")
    os.mkdir(secrets_path)
    with open(os.path.join(secrets_path, "token"), "w") as f:
        f.write("green_eggs_and_ham")

    workflow = mock_workflow(tmpdir)
    mock_get_retry_session()

    def handle_composes_post(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if isinstance(request.body, six.text_type):
            body = request.body
        else:
            body = request.body.decode()
        body_json = json.loads(body)
        assert body_json['source']['type'] == 'module'
        if specify_version:
            assert body_json['source']['source'] == MODULE_NSV
        else:
            assert body_json['source']['source'] == MODULE_NS
        return (200, {}, compose_json(0, 'wait'))

    responses.add_callback(responses.POST, ODCS_URL + '/composes/',
                           content_type='application/json',
                           callback=handle_composes_post)

    state = {'count': 1}

    def handle_composes_get(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if state['count'] == 1:
            response_json = compose_json(1, 'generating')
        else:
            response_json = compose_json(2, 'done')
        state['count'] += 1

        return (200, {}, response_json)

    responses.add_callback(responses.GET, ODCS_URL + '/composes/84',
                           content_type='application/json',
                           callback=handle_composes_get)

    def handle_unreleasedvariants(request):
        query = parse_qs(urlparse(request.url).query)

        assert query['variant_id'] == [MODULE_NAME]
        assert query['variant_version'] == [MODULE_STREAM]
        assert query['variant_release'] == [MODULE_VERSION]

        return (200, {}, json.dumps(LATEST_VERSION_JSON))

    responses.add_callback(responses.GET, PDC_URL + '/unreleasedvariants/',
                           content_type='application/json',
                           callback=handle_unreleasedvariants)

    args = {
        'module_name': 'eog',
        'module_stream': 'f26',
        'base_image': "registry.fedoraproject.org/fedora:latest",
        'odcs_url': ODCS_URL,
        'odcs_openidc_secret_path': secrets_path,
        'pdc_url': PDC_URL
    }
    if specify_version:
        args['module_version'] = MODULE_VERSION

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': ResolveModuleComposePlugin.key,
            'args': args
        }]
    )

    runner.run()

    compose_info = get_compose_info(workflow)

    assert compose_info.compose_id == 84
    assert compose_info.base_module.name == MODULE_NAME
    assert compose_info.base_module.stream == MODULE_STREAM
    assert compose_info.base_module.version == MODULE_VERSION
    assert compose_info.base_module.mmd.summary == 'Eye of GNOME Application Module'
def test_resolve_module_compose(tmpdir, docker_tasker, compose_ids, modules,
                                signing_intent, signing_intent_source, sigkeys):
    secrets_path = os.path.join(str(tmpdir), "secret")
    os.mkdir(secrets_path)
    with open(os.path.join(secrets_path, "token"), "w") as f:
        f.write("green_eggs_and_ham")

    if modules is not None:
        data = "compose:\n"
        data += "    modules:\n"
        for mod in modules:
            data += "    - {}\n".format(mod)
        if signing_intent_source == 'container_yaml':
            data += '    signing_intent: ' + signing_intent
        tmpdir.join(REPO_CONTAINER_CONFIG).write(data)

    module = None
    if modules:
        module = modules[0]

    workflow = mock_workflow(tmpdir)
    mock_get_retry_session()
    mock_koji_session()

    def handle_composes_post(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if isinstance(request.body, six.text_type):
            body = request.body
        else:
            body = request.body.decode()
        body_json = json.loads(body)
        assert body_json['source']['type'] == 'module'
        assert body_json['source']['source'] == module
        assert body_json['source']['sigkeys'] == sigkeys
        assert body_json['arches'] == ['ppc64le', 'x86_64']
        return (200, {}, compose_json(0, 'wait'))

    responses.add_callback(responses.POST, ODCS_URL + '/composes/',
                           content_type='application/json',
                           callback=handle_composes_post)

    state = {'count': 1}

    def handle_composes_get(request):
        assert request.headers['Authorization'] == 'Bearer green_eggs_and_ham'

        if state['count'] == 1:
            response_json = compose_json(1, 'generating')
        else:
            response_json = compose_json(2, 'done')
        state['count'] += 1

        return (200, {}, response_json)

    responses.add_callback(responses.GET, ODCS_URL + '/composes/84',
                           content_type='application/json',
                           callback=handle_composes_get)

    args = {
        'odcs_url': ODCS_URL,
        'odcs_openidc_secret_path': secrets_path,
        'compose_ids': compose_ids
    }

    if signing_intent_source == 'command_line':
        args['signing_intent'] = signing_intent

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
    workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
        ReactorConfig({'version': 1,
                       'odcs': {'api_url': ODCS_URL,
                                'auth': {'openidc_dir': secrets_path},
                                'signing_intents': [
                                    {
                                        'name': 'unsigned',
                                        'keys': [],
                                    },
                                    {
                                        'name': 'release',
                                        'keys': ['R123', 'R234'],
                                    },
                                    {
                                        'name': 'beta',
                                        'keys': ['R123', 'B456', 'B457'],
                                    },
                                ],
                                'default_signing_intent': 'unsigned'},
                       'koji':  {'auth': {},
                                 'hub_url': 'https://koji.example.com/hub'}})

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': ResolveModuleComposePlugin.key,
            'args': args
        }]
    )

    if modules is None:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config in container.yaml is required ' in str(exc_info.value)
    elif not modules:
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        assert '"compose" config has no modules' in str(exc_info.value)
    else:
        runner.run()

        compose_info = get_compose_info(workflow)

        assert compose_info.compose_id == 84
        assert compose_info.base_module.name == MODULE_NAME
        assert compose_info.base_module.stream == MODULE_STREAM
        assert compose_info.base_module.version == MODULE_VERSION
        assert compose_info.base_module.mmd.props.summary == 'Eye of GNOME Application Module'
        assert compose_info.base_module.rpms == [
            'eog-0:3.28.3-1.module_2123+73a9ef6f.src.rpm',
            'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64.rpm',
            'eog-0:3.28.3-1.module_2123+73a9ef6f.ppc64le.rpm',
        ]
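
The fixture-like arguments in these tests (compose_ids, modules, signing_intent, signing_intent_source, sigkeys) come from a pytest.mark.parametrize decorator that the snippets do not include. A hypothetical parametrization with the same shape (all cases invented):

import pytest

@pytest.mark.parametrize(
    ('compose_ids', 'modules', 'signing_intent', 'signing_intent_source', 'sigkeys'),
    [
        ([], ['eog:f28'], 'unsigned', 'default', []),
        ([], ['eog:f28'], 'release', 'container_yaml', ['R123', 'R234']),
        ([], None, 'unsigned', 'default', []),
    ])
def test_parametrize_shape(compose_ids, modules, signing_intent,
                           signing_intent_source, sigkeys):
    assert signing_intent_source in ('default', 'container_yaml', 'command_line')
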
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        extra['image'].update(get_parent_image_koji_data(self.workflow))

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')
        extra['submitter'] = self.session.getLoggedInUser()['name']

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }
        if self.workflow.all_yum_repourls:
            extra['image']['yum_repourls'] = self.workflow.all_yum_repourls

        self.set_help(extra, worker_metadatas)
        self.set_media_types(extra, worker_metadatas)
        self.set_go_metadata(extra)
        self.set_operators_metadata(extra, worker_metadatas)
        self.remove_unavailable_manifest_digests(worker_metadatas)
        self.set_group_manifest_info(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        return build