def test_orchestrate_override_build_kwarg(tmpdir):
    workflow = mock_workflow(tmpdir)
    expected_kwargs = {
        'git_uri': SOURCE['uri'],
        'git_ref': 'master',
        'git_branch': 'master',
        'user': '******',
        'is_auto': False,
        'platform': 'x86_64',
        'release': '4242',
        'arrangement_version': 1
    }
    mock_osbs(worker_expect=expected_kwargs)
    mock_reactor_config(tmpdir)

    plugin_args = {
        'platforms': ['x86_64'],
        'build_kwargs': make_worker_build_kwargs(),
        'worker_build_image': 'fedora:latest',
        'osbs_client_config': str(tmpdir),
    }

    override_build_kwarg(workflow, 'release', '4242')

    runner = BuildStepPluginsRunner(
        workflow.builder.tasker,
        workflow,
        [{
            'name': OrchestrateBuildPlugin.key,
            'args': plugin_args,
        }]
    )

    build_result = runner.run()
    assert not build_result.is_failed()
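
The test above exercises the core pattern this page documents: a value set with override_build_kwarg ends up in the kwargs the orchestrator passes to each worker build, and an optional trailing platform argument scopes the override to one architecture. A minimal sketch of that calling pattern, assuming an existing workflow object from atomic-reactor (the helper names here are illustrative, not part of the library):

from atomic_reactor.plugins.build_orchestrate_build import override_build_kwarg

def apply_release_override(workflow, release):
    # No platform argument: the override applies to the worker builds of
    # every platform.
    override_build_kwarg(workflow, 'release', release)

def apply_arch_repos(workflow, repos_by_arch):
    for arch, repourls in repos_by_arch.items():
        # A trailing platform argument scopes the override to that single
        # architecture, as forward_composes() does further down this page.
        override_build_kwarg(workflow, 'yum_repourls', repourls, arch)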
Example #2
    def set_worker_params(self, remote_sources):
        for remote_source in remote_sources:
            build_args = {}
            env_vars = self.cachito_session.get_request_env_vars(
                remote_source['request_id'])

            for env_var, value_info in env_vars.items():
                build_arg_value = value_info['value']
                kind = value_info['kind']
                if kind == 'path':
                    name = remote_source['name'] or ''
                    build_arg_value = os.path.join(REMOTE_SOURCE_DIR, name,
                                                   value_info['value'])
                    self.log.debug(
                        'Setting the Cachito environment variable "%s" to the absolute path "%s"',
                        env_var,
                        build_arg_value,
                    )
                    build_args[env_var] = build_arg_value
                elif kind == 'literal':
                    self.log.debug(
                        'Setting the Cachito environment variable "%s" to a literal value "%s"',
                        env_var,
                        build_arg_value,
                    )
                    build_args[env_var] = build_arg_value
                else:
                    raise RuntimeError(
                        f'Unknown kind {kind} received from Cachito.')

            remote_source['build_args'] = build_args
        override_build_kwarg(self.workflow, 'remote_sources', remote_sources)
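
The loop above distinguishes Cachito environment variables by their 'kind': 'path' values are anchored under the remote-source directory of the given remote source, while 'literal' values are passed through unchanged. A self-contained sketch of that dispatch on made-up sample data (the REMOTE_SOURCE_DIR value is assumed here purely for illustration):

import os

REMOTE_SOURCE_DIR = '/remote-source'  # assumed value, for illustration only

def to_build_args(env_vars, source_name=''):
    build_args = {}
    for env_var, value_info in env_vars.items():
        if value_info['kind'] == 'path':
            # Relative paths are rebased under the remote-source mount point.
            build_args[env_var] = os.path.join(REMOTE_SOURCE_DIR, source_name,
                                               value_info['value'])
        elif value_info['kind'] == 'literal':
            build_args[env_var] = value_info['value']
        else:
            raise RuntimeError('Unknown kind {}'.format(value_info['kind']))
    return build_args

print(to_build_args(
    {'GOPATH': {'kind': 'path', 'value': 'deps/gomod'},
     'GOFLAGS': {'kind': 'literal', 'value': '-mod=vendor'}},
    source_name='app'))
# -> {'GOPATH': '/remote-source/app/deps/gomod', 'GOFLAGS': '-mod=vendor'}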
    def _set_worker_arg(self, replacement_pullspecs):
        arg = {
            str(old): str(new)
            for old, new in replacement_pullspecs.items()
        }
        override_build_kwarg(self.workflow,
                             "operator_bundle_replacement_pullspecs", arg)
Example #4
    def forward_composes(self):
        repos_by_arch = defaultdict(list)
        # set overrides by arch if arches are available
        for compose_info in self.composes_info:
            result_repofile = compose_info['result_repofile']
            try:
                arches = compose_info['arches']
            except KeyError:
                repos_by_arch[None].append(result_repofile)
            else:
                for arch in arches.split():
                    repos_by_arch[arch].append(result_repofile)

        # we should almost never have a None entry, but if we do, we need to merge
        # it with all other repos.
        try:
            noarch_repos = repos_by_arch.pop(None)
        except KeyError:
            pass
        else:
            for repos in repos_by_arch.values():
                repos.extend(noarch_repos)
        for arch, repofiles in repos_by_arch.items():
            override_build_kwarg(self.workflow, 'yum_repourls', repofiles,
                                 arch)
        # Only set the None override if there are no other repos
        if not repos_by_arch:
            override_build_kwarg(self.workflow, 'yum_repourls', noarch_repos,
                                 None)
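
forward_composes() above groups compose repo files per architecture and then folds any arch-less entries into every per-arch list before setting the 'yum_repourls' override per platform. The merge step in isolation, as a runnable sketch with invented repo file names:

from collections import defaultdict

def merge_noarch(repos_by_arch):
    # Using pop with a default keeps the sketch safe when no arch-less
    # entry exists.
    noarch_repos = repos_by_arch.pop(None, [])
    for repos in repos_by_arch.values():
        repos.extend(noarch_repos)
    return repos_by_arch, noarch_repos

repos = defaultdict(list)
repos['x86_64'].append('odcs-1234.repo')
repos['ppc64le'].append('odcs-1235.repo')
repos[None].append('everywhere.repo')
by_arch, noarch = merge_noarch(repos)
print(dict(by_arch))
# -> {'x86_64': ['odcs-1234.repo', 'everywhere.repo'],
#     'ppc64le': ['odcs-1235.repo', 'everywhere.repo']}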
def test_orchestrate_override_build_kwarg(tmpdir):
    workflow = mock_workflow(tmpdir)
    expected_kwargs = {
        'git_uri': SOURCE['uri'],
        'git_ref': 'master',
        'git_branch': 'master',
        'user': '******',
        'is_auto': False,
        'platform': 'x86_64',
        'release': '4242',
        'arrangement_version': 1
    }
    mock_osbs(worker_expect=expected_kwargs)
    mock_reactor_config(tmpdir)

    plugin_args = {
        'platforms': ['x86_64'],
        'build_kwargs': make_worker_build_kwargs(),
        'worker_build_image': 'fedora:latest',
        'osbs_client_config': str(tmpdir),
    }

    override_build_kwarg(workflow, 'release', '4242')

    runner = BuildStepPluginsRunner(workflow.builder.tasker, workflow,
                                    [{
                                        'name': OrchestrateBuildPlugin.key,
                                        'args': plugin_args,
                                    }])

    build_result = runner.run()
    assert not build_result.is_failed()
    def forward_composes(self):
        repos_by_arch = defaultdict(list)
        # set overrides by arch if arches are available
        for compose_info in self.composes_info:
            result_repofile = compose_info['result_repofile']
            try:
                arches = compose_info['arches']
            except KeyError:
                repos_by_arch[None].append(result_repofile)
            else:
                for arch in arches.split():
                    repos_by_arch[arch].append(result_repofile)

        # we should almost never have a None entry from composes,
        # but we can have yum_repos added, so if we do, we need to merge
        # it with all other repos.
        if self.repourls:
            repos_by_arch[None].extend(self.repourls)
        try:
            noarch_repos = repos_by_arch.pop(None)
        except KeyError:
            pass
        else:
            for repos in repos_by_arch.values():
                repos.extend(noarch_repos)

        for arch, repofiles in repos_by_arch.items():
            override_build_kwarg(self.workflow, 'yum_repourls', repofiles, arch)
        # Only set the None override if there are no other repos
        if not repos_by_arch:
            override_build_kwarg(self.workflow, 'yum_repourls', noarch_repos, None)
Example #7
    def run(self):
        self.adjust_for_autorebuild()
        if self.allow_inheritance():
            self.adjust_for_inherit()
        self.workflow.all_yum_repourls = self.repourls

        try:
            self.read_configs()
        except SkipResolveComposesPlugin as abort_exc:
            override_build_kwarg(self.workflow, 'yum_repourls', self.repourls, None)
            self.log.info('Aborting plugin execution: %s', abort_exc)
            return

        self.adjust_compose_config()
        self.request_compose_if_needed()
        try:
            self.wait_for_composes()
        except WaitComposeToFinishTimeout as e:
            self.log.info(str(e))
            preserve_composes = set(self.compose_ids).union(self.parent_compose_ids)
            cancel_composes = set(self.all_compose_ids) - preserve_composes
            if cancel_composes:
                self.log.info('Canceling unfinished composes which were created by the build: %s',
                              cancel_composes)

            for compose_id in cancel_composes:
                if self.odcs_client.get_compose_status(compose_id) in ['wait', 'generating']:
                    self.log.info('Canceling the compose %s', compose_id)
                    self.odcs_client.cancel_compose(compose_id)
                else:
                    self.log.info('The compose %s is not in progress, skip canceling', compose_id)
            raise
        self.resolve_signing_intent()
        self.forward_composes()
        return self.make_result()
Example #8
    def _set_worker_arg(self, replacement_pullspecs):
        arg = {
            str(repl['original']): str(repl['new'])
            for repl in replacement_pullspecs if repl['replaced']
        }
        override_build_kwarg(self.workflow,
                             "operator_bundle_replacement_pullspecs", arg)
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        # Create the dockerfile

        if source.runtime:
            profile = 'runtime'
        else:
            profile = 'default'

        module_info = source.compose.base_module

        packages = ' '.join(module_info.mmd.profiles[profile].rpms)

        df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                                stream=module_info.stream,
                                                version=module_info.version,
                                                base_image=self.base_image,
                                                packages=packages,
                                                rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            for line in source.flatpak_json.get('cleanup-commands', []):
                f.write(line)
                f.write("\n")
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        override_build_kwarg(self.workflow, 'module_compose_id', source.compose.compose_id)
    def pull_latest_commit_if_configured(self):
        if not self.should_use_latest_commit():
            return

        git_branch = self.build_labels['git-branch']
        self.workflow.source.reset('origin/{}'.format(git_branch))

        # Import it here to avoid circular import errors.
        from atomic_reactor.plugins.build_orchestrate_build import override_build_kwarg
        override_build_kwarg(self.workflow, 'git_ref', self.workflow.source.commit_id)
    def run(self):
        """
        run the plugin
        """

        self.log.info("Resolving module compose")

        compose_info = self._resolve_compose()
        set_compose_info(self.workflow, compose_info)
        override_build_kwarg(self.workflow, 'compose_ids', [compose_info.compose_id])
Example #12
    def run(self):
        """
        run the plugin
        """

        self.log.info("Resolving module compose")

        compose_info = self._resolve_compose()
        set_compose_info(self.workflow, compose_info)
        override_build_kwarg(self.workflow, 'compose_ids', [compose_info.compose_id])
Example #13
    def pull_latest_commit_if_configured(self):
        if not self.should_use_latest_commit():
            return

        git_branch = self.build_labels['git-branch']
        self.workflow.source.reset('origin/{}'.format(git_branch))

        # Import it here to avoid circular import errors.
        from atomic_reactor.plugins.build_orchestrate_build import override_build_kwarg
        override_build_kwarg(self.workflow, 'git_ref',
                             self.workflow.source.commit_id)
Example #14
    def set_worker_params(self, source_request, remote_source_url):
        build_args = {
            # Turn the environment variables into absolute paths that
            # represent where the remote sources are copied to during
            # the build process.
            env_var: os.path.join(REMOTE_SOURCE_DIR, value)
            for env_var, value in source_request.get('environment_variables',
                                                     {}).items()
        }
        override_build_kwarg(self.workflow, 'remote_source_url',
                             remote_source_url)
        override_build_kwarg(self.workflow, 'remote_source_build_args',
                             build_args)
    def set_worker_params(self, source_request, remote_source_url,
                          remote_source_conf_url, remote_source_icm_url):
        build_args = {}
        # This matches values such as 'deps/gomod' but not 'true'
        rel_path_regex = re.compile(r'^[^/]+/[^/]+(?:/[^/]+)*$')
        for env_var, value in source_request.get('environment_variables',
                                                 {}).items():
            # Turn the environment variables that are relative paths into absolute paths that
            # represent where the remote sources are copied to during the build process.
            if re.match(rel_path_regex, value):
                abs_path = os.path.join(REMOTE_SOURCE_DIR, value)
                self.log.debug(
                    'Setting the Cachito environment variable "%s" to the absolute path "%s"',
                    env_var,
                    abs_path,
                )
                build_args[env_var] = abs_path
            else:
                build_args[env_var] = value

        # Alias for absolute path to cachito.env script added into buildargs
        build_args[CACHITO_ENV_ARG_ALIAS] = os.path.join(
            REMOTE_SOURCE_DIR, CACHITO_ENV_FILENAME)

        override_build_kwarg(self.workflow, 'remote_source_url',
                             remote_source_url)
        override_build_kwarg(self.workflow, 'remote_source_build_args',
                             build_args)
        override_build_kwarg(self.workflow, 'remote_source_configs',
                             remote_source_conf_url)
        override_build_kwarg(self.workflow, 'remote_source_icm_url',
                             remote_source_icm_url)
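
This variant decides per value whether it looks like a relative path ('deps/gomod') or a plain literal ('true') and only rebases the former under the remote-source directory. A quick check of that pattern on sample values (the REMOTE_SOURCE_DIR value is assumed here for illustration):

import os
import re

REMOTE_SOURCE_DIR = '/remote-source'  # assumed value, for illustration only
rel_path_regex = re.compile(r'^[^/]+/[^/]+(?:/[^/]+)*$')

for value in ('deps/gomod', 'true', '/absolute/path'):
    if rel_path_regex.match(value):
        print(value, '->', os.path.join(REMOTE_SOURCE_DIR, value))
    else:
        print(value, '-> kept as a literal')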
    def run(self):
        try:
            self.adjust_for_autorebuild()
            if self.allow_inheritance:
                self.adjust_for_inherit()
            self.workflow.all_yum_repourls = self.repourls
            self.read_configs()
            self.adjust_compose_config()
            self.request_compose_if_needed()
            self.wait_for_composes()
            self.resolve_signing_intent()
            self.forward_composes()
            return self.make_result()
        except SkipResolveComposesPlugin as abort_exc:
            override_build_kwarg(self.workflow, 'yum_repourls', self.repourls, None)
            self.log.info('Aborting plugin execution: %s', abort_exc)
    def run(self):
        try:
            self.adjust_for_autorebuild()
            if self.allow_inheritance():
                self.adjust_for_inherit()
            self.workflow.all_yum_repourls = self.repourls
            self.read_configs()
            self.adjust_compose_config()
            self.request_compose_if_needed()
            self.wait_for_composes()
            self.resolve_signing_intent()
            self.forward_composes()
            return self.make_result()
        except SkipResolveComposesPlugin as abort_exc:
            override_build_kwarg(self.workflow, 'yum_repourls', self.repourls,
                                 None)
            self.log.info('Aborting plugin execution: %s', abort_exc)
    def forward_composes(self):
        repos_by_arch = defaultdict(list)
        # set overrides by arch if arches are available
        for compose_info in self.composes_info:
            result_repofile = compose_info['result_repofile']
            try:
                arches = compose_info['arches']
            except KeyError:
                repos_by_arch[None].append(result_repofile)
            else:
                for arch in arches.split():
                    repos_by_arch[arch].append(result_repofile)

        # we should almost never have a None entry from composes,
        # but we can have yum_repos added, so if we do, we need to merge
        # it with all other repos.
        if self.repourls:
            repos_by_arch[None].extend(self.repourls)
        try:
            noarch_repos = repos_by_arch.pop(None)
        except KeyError:
            pass
        else:
            for repos in repos_by_arch.values():
                repos.extend(noarch_repos)

        for arch, repofiles in repos_by_arch.items():
            override_build_kwarg(self.workflow, 'yum_repourls', repofiles,
                                 arch)

        # Only set the None override if there are no other repos
        if not repos_by_arch:
            override_build_kwarg(self.workflow, 'yum_repourls', noarch_repos,
                                 None)

        # If we don't think the set of packages available from the user-supplied repourls,
        # inherited repourls, and composed repositories is complete, set the
        # 'include_koji_repo' kwarg so that the 'yum_repourls' kwarg that we just set
        # doesn't result in the 'koji' plugin being omitted.
        if not self.has_complete_repos:
            override_build_kwarg(self.workflow, 'include_koji_repo', True)

        # So that plugins like flatpak_update_dockerfile can get information about the composes
        override_build_kwarg(self.workflow, 'compose_ids',
                             self.all_compose_ids)
Example #19
    def set_worker_params(self, source_request, remote_source_url,
                          remote_source_conf_url, remote_source_icm_url):
        build_args = {}
        env_vars = self.cachito_session.get_request_env_vars(
            source_request['id'])

        for env_var, value_info in env_vars.items():
            build_arg_value = value_info['value']
            kind = value_info['kind']
            if kind == 'path':
                build_arg_value = os.path.join(REMOTE_SOURCE_DIR,
                                               value_info['value'])
                self.log.debug(
                    'Setting the Cachito environment variable "%s" to the absolute path "%s"',
                    env_var,
                    build_arg_value,
                )
                build_args[env_var] = build_arg_value
            elif kind == 'literal':
                self.log.debug(
                    'Setting the Cachito environment variable "%s" to a literal value "%s"',
                    env_var,
                    build_arg_value,
                )
                build_args[env_var] = build_arg_value
            else:
                raise RuntimeError(f'Unknown kind {kind} received from Cachito.')

        # Alias for absolute path to cachito.env script added into buildargs
        build_args[CACHITO_ENV_ARG_ALIAS] = os.path.join(
            REMOTE_SOURCE_DIR, CACHITO_ENV_FILENAME)

        override_build_kwarg(self.workflow, 'remote_source_url',
                             remote_source_url)
        override_build_kwarg(self.workflow, 'remote_source_build_args',
                             build_args)
        override_build_kwarg(self.workflow, 'remote_source_configs',
                             remote_source_conf_url)
        override_build_kwarg(self.workflow, 'remote_source_icm_url',
                             remote_source_icm_url)
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        builder = FlatpakBuilder(source, None, None)

        builder.precheck()

        # Create the dockerfile

        module_info = source.base_module

        # We need to enable all the modules other than the platform pseudo-module
        modules_str = ' '.join(builder.get_enable_modules())

        install_packages_str = ' '.join(builder.get_install_packages())

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           modules=modules_str,
                                           packages=install_packages_str,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        includepkgs = builder.get_includepkgs()
        includepkgs_path = os.path.join(self.workflow.builder.df_dir,
                                        'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            f.write(builder.get_cleanup_script())
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        compose_info = get_compose_info(self.workflow)

        repo = {
            'name': repo_name,
            'baseurl': compose_info.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        override_build_kwarg(self.workflow, 'module_compose_id',
                             compose_info.compose_id)
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        # Create the dockerfile

        if source.runtime:
            profile = 'runtime'
        else:
            profile = 'default'

        module_info = source.compose.base_module

        packages = ' '.join(module_info.mmd.profiles[profile].rpms)

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           packages=packages,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            for line in source.flatpak_json.get('cleanup-commands', []):
                f.write(line)
                f.write("\n")
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        override_build_kwarg(self.workflow, 'module_compose_id',
                             source.compose.compose_id)
Example #22
    def run(self):
        """
        run the plugin
        """

        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        module_info = source.compose.base_module

        # For a runtime, certain information is duplicated between the container.yaml
        # and the modulemd, check that it matches
        if source.runtime:
            flatpak_yaml = source.flatpak_yaml
            flatpak_xmd = module_info.mmd.props.xmd['flatpak']

            def check(condition, what):
                if not condition:
                    raise RuntimeError(
                        "Mismatch for {} betweeen module xmd and container.yaml"
                        .format(what))

            check(flatpak_yaml['branch'] == flatpak_xmd['branch'], "'branch'")
            check(source.profile in flatpak_xmd['runtimes'], 'profile name')

            profile_xmd = flatpak_xmd['runtimes'][source.profile]

            check(flatpak_yaml['id'] == profile_xmd['id'], "'id'")
            check(
                flatpak_yaml.get('runtime',
                                 None) == profile_xmd.get('runtime', None),
                "'runtime'")
            check(
                flatpak_yaml.get('sdk', None) == profile_xmd.get('sdk', None),
                "'sdk'")

        # Create the dockerfile

        install_packages = module_info.mmd.peek_profiles()[
            source.profile].props.rpms.get()
        install_packages_str = ' '.join(install_packages)

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           packages=install_packages_str,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # For a runtime, we want to make sure that the set of RPMs that is installed
        # into the filesystem is *exactly* the set that is listed in the runtime
        # profile. Requiring the full listed set of RPMs to be listed makes it
        # easier to catch unintentional changes in the package list that might break
        # applications depending on the runtime. It also simplifies the checking we
        # do for application flatpaks, since we can simply look at the runtime
        # modulemd to find out what packages are present in the runtime.
        #
        # For an application, we want to make sure that each RPM that is installed
        # into the filesystem is *either* an RPM that is part of the 'runtime'
        # profile of the base runtime, or from a module that was built with
        # flatpak-rpm-macros in the install root and, thus, prefix=/app.
        #
        # We achieve this by restricting the set of available packages in the dnf
        # configuration to just the ones that we want.
        #
        # The advantage of doing this upfront, rather than just checking after the
        # fact is that this makes sure that when a application is being installed,
        # we don't get a different package to satisfy a dependency than the one
        # in the runtime - e.g. aajohan-comfortaa-fonts to satisfy font(:lang=en)
        # because it's alphabetically first.

        if not source.runtime:
            runtime_module = source.runtime_module
            runtime_profile = runtime_module.mmd.peek_profiles()['runtime']
            available_packages = sorted(runtime_profile.props.rpms.get())

            for m in source.app_modules:
                # Strip off the '.rpm' suffix from the filename to get something
                # that DNF can parse.
                available_packages.extend(x[:-4] for x in m.rpms)
        else:
            base_module = source.compose.base_module
            runtime_profile = base_module.mmd.peek_profiles()['runtime']
            available_packages = sorted(runtime_profile.props.rpms.get())

        includepkgs_path = os.path.join(self.workflow.builder.df_dir,
                                        'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(available_packages) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            cleanup_commands = source.flatpak_yaml.get('cleanup-commands')
            if cleanup_commands is not None:
                f.write(cleanup_commands.rstrip())
                f.write("\n")
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        override_build_kwarg(self.workflow, 'module_compose_id',
                             source.compose.compose_id)
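
The long comment in the example above explains why the dnf package set is restricted up front: runtimes must contain exactly the RPMs in their 'runtime' profile, and applications may only pull packages from that profile or from their own flatpak-built modules. A self-contained sketch of building the allow-list and writing the includepkgs file, using invented package names in place of the modulemd objects:

runtime_profile_rpms = ['glibc', 'flatpak-runtime-config', 'zlib']
app_module_rpms = ['myapp-1.0-1.x86_64.rpm', 'myapp-libs-1.0-1.x86_64.rpm']

available_packages = sorted(runtime_profile_rpms)
# Strip the '.rpm' suffix so dnf can parse the entries.
available_packages.extend(x[:-4] for x in app_module_rpms)

with open('atomic-reactor-includepkgs', 'w') as f:
    f.write('includepkgs = ' + ','.join(available_packages) + '\n')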
Example #23
    def forward_composes(self):
        yum_repourls = [compose_info['result_repofile'] for compose_info in self.composes_info]
        override_build_kwarg(self.workflow, 'yum_repourls', yum_repourls)