def test_no_config(self):
        tasker, workflow = self.prepare()
        conf = get_config(workflow)
        assert isinstance(conf, ReactorConfig)

        same_conf = get_config(workflow)
        assert conf is same_conf
    def test_odcs_config_invalid_default_signing_intent(self, tmpdir):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123]
                   - name: beta
                     keys: [R123, B456]
                   - name: unsigned
                     keys: []
                   default_signing_intent: spam
                   api_url: http://odcs.example.com
                   auth:
                       ssl_certs_dir: /var/run/secrets/atomic-reactor/odcssecret
                """))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        with pytest.raises(ValueError) as exc_info:
            get_config(workflow).get_odcs_config()
        message = str(exc_info.value)
        assert message == dedent("""\
            unknown signing intent name "spam", valid names: unsigned, beta, release
            """.rstrip())
    def test_odcs_config(self, tmpdir, default):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123, R234]
                   - name: beta
                     keys: [R123, B456, B457]
                   - name: unsigned
                     keys: []
                   default_signing_intent: {default}
                   api_url: http://odcs.example.com
                   auth:
                       ssl_certs_dir: /var/run/secrets/atomic-reactor/odcssecret
                """.format(default=default)))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        odcs_config = get_config(workflow).get_odcs_config()

        assert odcs_config.default_signing_intent == default

        unsigned_intent = {'name': 'unsigned', 'keys': [], 'restrictiveness': 0}
        beta_intent = {'name': 'beta', 'keys': ['R123', 'B456', 'B457'], 'restrictiveness': 1}
        release_intent = {'name': 'release', 'keys': ['R123', 'R234'], 'restrictiveness': 2}
        assert odcs_config.signing_intents == [
            unsigned_intent, beta_intent, release_intent
        ]
        assert odcs_config.get_signing_intent_by_name('release') == release_intent
        assert odcs_config.get_signing_intent_by_name('beta') == beta_intent
        assert odcs_config.get_signing_intent_by_name('unsigned') == unsigned_intent

        with pytest.raises(ValueError):
            odcs_config.get_signing_intent_by_name('missing')

        assert odcs_config.get_signing_intent_by_keys(['R123', 'R234'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123 R234')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123', 'B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456', 'R123'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456 R123')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('R123 B456 ')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys([])['name'] == 'unsigned'
        assert odcs_config.get_signing_intent_by_keys('')['name'] == 'unsigned'

        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['missing'])
        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['R123', 'R234', 'B457'])
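
# The assertions above pin down how ODCSConfig.get_signing_intent_by_keys behaves.
# A minimal sketch of that lookup, assuming signing_intents is ordered from least
# to most restrictive (the helper name and structure here are illustrative, not
# the real implementation):
def _signing_intent_by_keys_sketch(signing_intents, keys):
    if isinstance(keys, str):
        keys = keys.split()
    keys = set(keys)
    # Walk from the most restrictive intent down and return the first intent
    # whose key set covers every requested key. An empty key set only matches
    # the intent that itself has no keys (the "unsigned" intent).
    for intent in reversed(signing_intents):
        intent_keys = set(intent['keys'])
        if (keys and keys <= intent_keys) or keys == intent_keys:
            return intent
    raise ValueError('unknown signing keys: {}'.format(sorted(keys)))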
    def test_good_cluster_config(self, tmpdir, config, clusters):
        filename = os.path.join(str(tmpdir), 'config.yaml')
        with open(filename, 'w') as fp:
            fp.write(dedent(config))
        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        conf = get_config(workflow)
        enabled = conf.get_enabled_clusters_for_platform('platform')
        assert set([(x.name, x.max_concurrent_builds)
                    for x in enabled]) == set(clusters)
    def __init__(self, tasker, workflow, koji_target=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param koji_target: str, Koji build target name
        """
        # call parent constructor
        super(CheckAndSetPlatformsPlugin, self).__init__(tasker, workflow)
        self.koji_target = koji_target
        self.reactor_config = get_config(self.workflow)
    def test_odcs_config_invalid_default_signing_intent(self, tmpdir):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123]
                   - name: beta
                     keys: [R123, B456]
                   - name: unsigned
                     keys: []
                   default_signing_intent: spam
                """))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        with pytest.raises(ValueError) as exc_info:
            get_config(workflow).get_odcs_config()
        assert 'unknown signing intent' in str(exc_info.value)
    def test_good_cluster_config(self, tmpdir, reactor_config_map, config, clusters):
        if reactor_config_map and config:
            os.environ['REACTOR_CONFIG'] = dedent(config)
        else:
            filename = os.path.join(str(tmpdir), 'config.yaml')
            with open(filename, 'w') as fp:
                fp.write(dedent(config))
        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None
        os.environ.pop('REACTOR_CONFIG', None)

        conf = get_config(workflow)
        enabled = conf.get_enabled_clusters_for_platform('platform')
        assert set([(x.name, x.max_concurrent_builds)
                    for x in enabled]) == set(clusters)
    def read_configs(self):
        self.odcs_config = get_config(self.workflow).get_odcs_config()
        if not self.odcs_config:
            raise SkipResolveComposesPlugin('ODCS config not found')

        workdir = self.workflow.source.get_build_file_path()[1]
        file_path = os.path.join(workdir, self.REPO_CONFIG)
        data = None
        if os.path.exists(file_path):
            with open(file_path) as f:
                data = (yaml.safe_load(f) or {}).get('compose')

        if not data and not self.compose_ids:
            raise SkipResolveComposesPlugin('"compose" config not set and compose_ids not given')

        self.compose_config = ComposeConfig(data, self.odcs_config)
    def test_good_cluster_config(self, tmpdir, reactor_config_map, config,
                                 clusters):
        if reactor_config_map and config:
            os.environ['REACTOR_CONFIG'] = dedent(config)
        else:
            filename = os.path.join(str(tmpdir), 'config.yaml')
            with open(filename, 'w') as fp:
                fp.write(dedent(config))
        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None
        os.environ.pop('REACTOR_CONFIG', None)

        conf = get_config(workflow)
        enabled = conf.get_enabled_clusters_for_platform('platform')
        assert set([(x.name, x.max_concurrent_builds)
                    for x in enabled]) == set(clusters)
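
# For reference, a hypothetical (config, clusters) parametrization that would
# satisfy the test above; the YAML follows the reactor config "clusters"
# section and the values are illustrative only:
_example_config = """\
version: 1
clusters:
  platform:
  - name: cluster1
    max_concurrent_builds: 4
    enabled: true
  - name: cluster2
    max_concurrent_builds: 8
    enabled: true
"""
_example_clusters = [('cluster1', 4), ('cluster2', 8)]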
    def test_odcs_config(self, tmpdir, default):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123]
                   - name: beta
                     keys: [R123, B456]
                   - name: unsigned
                     keys: []
                   default_signing_intent: {default}
                """.format(default=default)))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        odcs_config = get_config(workflow).get_odcs_config()

        assert odcs_config.default_signing_intent == default

        assert odcs_config.signing_intents == [
            {'name': 'unsigned', 'keys': [], 'restrictiveness': 0},
            {'name': 'beta', 'keys': ['R123', 'B456'], 'restrictiveness': 1},
            {'name': 'release', 'keys': ['R123'], 'restrictiveness': 2},
        ]

        with pytest.raises(ValueError):
            odcs_config.get_signing_intent_by_name('missing')

        assert odcs_config.get_signing_intent_by_keys(['R123'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123', 'B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456', 'R123'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456 R123')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys([])['name'] == 'unsigned'
        assert odcs_config.get_signing_intent_by_keys('')['name'] == 'unsigned'

        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['missing'])
    def get_signing_intent(self):
        """Get the signing intent to be used to fetch files from Koji

        :return: dict, signing intent object as per atomic_reactor/schemas/config.json
        """
        odcs_config = get_config(self.workflow).get_odcs_config()
        if odcs_config is None:
            self.log.warning('No ODCS configuration available. Allowing unsigned SRPMs')
            return {'keys': None}

        if not self.signing_intent:
            try:
                self.signing_intent = self.koji_build['extra']['image']['odcs']['signing_intent']
            except (KeyError, TypeError):
                self.log.debug('Image koji build, %s(%s), does not define signing_intent.',
                               self.koji_build_nvr, self.koji_build_id)
                self.signing_intent = odcs_config.default_signing_intent

        signing_intent = odcs_config.get_signing_intent_by_name(self.signing_intent)
        return signing_intent
    def choose_cluster(self, platform):
        config = get_config(self.workflow)
        clusters = [self.get_cluster_info(cluster, platform) for cluster in
                    config.get_enabled_clusters_for_platform(platform)]

        if not clusters:
            raise RuntimeError('No clusters found for platform {}!'
                               .format(platform))

        reachable_clusters = [cluster for cluster in clusters
                              if cluster.load != self.UNREACHABLE_CLUSTER_LOAD]

        if not reachable_clusters:
            raise RuntimeError('All clusters for platform {} are unreachable!'
                               .format(platform))

        selected = min(reachable_clusters, key=lambda c: c.load)
        self.log.info('platform %s will use cluster %s',
                      platform, selected.cluster.name)
        return selected
    def read_configs(self):
        self.odcs_config = get_config(self.workflow).get_odcs_config()
        if not self.odcs_config:
            raise SkipResolveComposesPlugin('ODCS config not found')

        data = self.workflow.source.config.compose
        if not data and not self.all_compose_ids:
            raise SkipResolveComposesPlugin('"compose" config not set and compose_ids not given')

        workdir = self.workflow.source.get_build_file_path()[1]
        file_path = os.path.join(workdir, REPO_CONTENT_SETS_CONFIG)
        pulp_data = None
        if os.path.exists(file_path):
            with open(file_path) as f:
                pulp_data = yaml.safe_load(f) or {}

        platforms = get_platforms(self.workflow)
        if platforms:
            platforms = sorted(platforms)  # sorted to keep predictable for tests

        self.compose_config = ComposeConfig(data, pulp_data, self.odcs_config,
                                            arches=platforms)
    def read_configs(self):
        self.odcs_config = get_config(self.workflow).get_odcs_config()
        if not self.odcs_config:
            raise SkipResolveComposesPlugin('ODCS config not found')

        data = self.workflow.source.config.compose
        if not data and not self.all_compose_ids:
            raise SkipResolveComposesPlugin('"compose" config not set and compose_ids not given')

        pulp_data = util.read_content_sets(self.workflow) or {}

        platforms = get_platforms(self.workflow)
        if platforms:
            platforms = sorted(platforms)  # sorted to keep predictable for tests

        koji_tag = None
        if self.koji_target:
            target_info = self.koji_session.getBuildTarget(self.koji_target, strict=True)
            koji_tag = target_info['build_tag_name']

        self.compose_config = ComposeConfig(data, pulp_data, self.odcs_config, koji_tag=koji_tag,
                                            arches=platforms)
    def select_and_start_cluster(self, platform):
        ''' Choose a cluster and start a build on it '''

        config = get_config(self.workflow)
        clusters = config.get_enabled_clusters_for_platform(platform)

        if not clusters:
            raise UnknownPlatformException('No clusters found for platform {}!'
                                           .format(platform))

        retry_contexts = {
            cluster.name: ClusterRetryContext(self.max_cluster_fails)
            for cluster in clusters
        }

        while True:
            try:
                possible_cluster_info = self.get_clusters(platform,
                                                          retry_contexts,
                                                          clusters)
            except AllClustersFailedException as ex:
                cluster = ClusterInfo(None, platform, None, None)
                build_info = WorkerBuildInfo(build=None,
                                             cluster_info=cluster,
                                             logger=self.log)
                build_info.monitor_exception = repr(ex)
                self.worker_builds.append(build_info)
                return

            for cluster_info in possible_cluster_info:
                ctx = retry_contexts[cluster_info.cluster.name]
                try:
                    self.log.info('Attempting to start build for platform %s on cluster %s',
                                  platform, cluster_info.cluster.name)
                    self.do_worker_build(cluster_info)
                    return
                except OsbsException:
                    ctx.try_again_later(self.failure_retry_delay)
    def select_and_start_cluster(self, platform):
        ''' Choose a cluster and start a build on it '''

        config = get_config(self.workflow)
        clusters = config.get_enabled_clusters_for_platform(platform)

        if not clusters:
            raise UnknownPlatformException(
                'No clusters found for platform {}!'.format(platform))

        retry_contexts = {
            cluster.name: ClusterRetryContext(self.max_cluster_fails)
            for cluster in clusters
        }

        while True:
            try:
                possible_cluster_info = self.get_clusters(
                    platform, retry_contexts, clusters)
            except AllClustersFailedException as ex:
                cluster = ClusterInfo(None, platform, None, None)
                build_info = WorkerBuildInfo(build=None,
                                             cluster_info=cluster,
                                             logger=self.log)
                build_info.monitor_exception = repr(ex)
                self.worker_builds.append(build_info)
                return

            for cluster_info in possible_cluster_info:
                ctx = retry_contexts[cluster_info.cluster.name]
                try:
                    self.log.info(
                        'Attempting to start build for platform %s on cluster %s',
                        platform, cluster_info.cluster.name)
                    self.do_worker_build(cluster_info)
                    return
                except OsbsException:
                    ctx.try_again_later(self.failure_retry_delay)
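
# select_and_start_cluster drives its retries through ClusterRetryContext. A rough,
# hypothetical sketch of the bookkeeping such a context needs (attribute and method
# names are assumptions based on how it is used above, not the real class):
import time


class _ClusterRetryContextSketch(object):
    def __init__(self, max_fails):
        self.max_fails = max_fails   # give up on the cluster after this many failures
        self.fails = 0
        self.retry_at = 0            # earliest time the cluster should be tried again

    def try_again_later(self, delay):
        # record one failed attempt and push the next attempt 'delay' seconds out
        self.fails += 1
        self.retry_at = time.time() + delay

    @property
    def failed(self):
        return self.fails >= self.max_fails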
    def read_configs(self):
        self.odcs_config = get_config(self.workflow).get_odcs_config()
        if not self.odcs_config:
            raise SkipResolveComposesPlugin('ODCS config not found')

        data = self.workflow.source.config.compose
        if not data and not self.compose_ids:
            raise SkipResolveComposesPlugin(
                '"compose" config not set and compose_ids not given')

        workdir = self.workflow.source.get_build_file_path()[1]
        file_path = os.path.join(workdir, REPO_CONTENT_SETS_CONFIG)
        pulp_data = None
        if os.path.exists(file_path):
            with open(file_path) as f:
                pulp_data = yaml.safe_load(f) or {}

        arches = self.get_arches()

        self.compose_config = ComposeConfig(data,
                                            pulp_data,
                                            self.odcs_config,
                                            arches=arches)
    def __init__(self,
                 tasker,
                 workflow,
                 build_kwargs,
                 platforms=None,
                 osbs_client_config=None,
                 worker_build_image=None,
                 config_kwargs=None,
                 find_cluster_retry_delay=FIND_CLUSTER_RETRY_DELAY,
                 failure_retry_delay=FAILURE_RETRY_DELAY,
                 max_cluster_fails=MAX_CLUSTER_FAILS,
                 url=None,
                 verify_ssl=True,
                 use_auth=True,
                 goarch=None):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param build_kwargs: dict, keyword arguments for starting worker builds
        :param platforms: list<str>, platforms to build
                          (used via utils.get_orchestrator_platforms())
        :param osbs_client_config: str, path to directory containing osbs.conf
        :param worker_build_image: str, the builder image to use for worker builds
                                  (not used, image is inherited from the orchestrator)
        :param config_kwargs: dict, keyword arguments to override worker configuration
        :param find_cluster_retry_delay: the delay in seconds to try again reaching a cluster
        :param failure_retry_delay: the delay in seconds to try again starting a build
        :param max_cluster_fails: the maximum number of times a cluster can fail before being
                                  ignored
        :param goarch: dict, keys are platform, values are go language platform names
        """
        super(OrchestrateBuildPlugin, self).__init__(tasker, workflow)
        self.platforms = get_platforms(self.workflow)

        self.build_kwargs = build_kwargs
        self.osbs_client_config_fallback = osbs_client_config
        self.config_kwargs = config_kwargs or {}

        self.adjust_build_kwargs()
        self.validate_arrangement_version()
        self.adjust_config_kwargs()
        self.reactor_config = get_config(self.workflow)

        self.find_cluster_retry_delay = find_cluster_retry_delay
        self.failure_retry_delay = failure_retry_delay
        self.max_cluster_fails = max_cluster_fails
        self.koji_upload_dir = generate_koji_upload_dir()
        self.fs_task_id = self.get_fs_task_id()
        self.release = self.get_release()

        self.plat_des_fallback = []
        for plat, architecture in (goarch or {}).items():
            plat_dic = {'platform': plat, 'architecture': architecture}
            self.plat_des_fallback.append(plat_dic)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {
                'enable': use_auth
            }
        }

        if worker_build_image:
            self.log.warning('worker_build_image is deprecated')

        self.worker_builds = []
        self.namespace = get_build_json().get('metadata',
                                              {}).get('namespace', None)
        self.build_image_digests = {}  # by platform
        self._openshift_session = None
        self.build_image_override = get_build_image_override(workflow, {})
        self.platform_descriptors = get_platform_descriptors(
            self.workflow, self.plat_des_fallback)
    def __init__(self, tasker, workflow, build_kwargs, platforms=None,
                 osbs_client_config=None, worker_build_image=None,
                 config_kwargs=None,
                 find_cluster_retry_delay=FIND_CLUSTER_RETRY_DELAY,
                 failure_retry_delay=FAILURE_RETRY_DELAY,
                 max_cluster_fails=MAX_CLUSTER_FAILS,
                 url=None, verify_ssl=True, use_auth=True,
                 goarch=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param build_kwargs: dict, keyword arguments for starting worker builds
        :param platforms: list<str>, platforms to build
                          (used via utils.get_orchestrator_platforms())
        :param osbs_client_config: str, path to directory containing osbs.conf
        :param worker_build_image: str, the builder image to use for worker builds
                                  (not used, image is inherited from the orchestrator)
        :param config_kwargs: dict, keyword arguments to override worker configuration
        :param find_cluster_retry_delay: the delay in seconds to try again reaching a cluster
        :param failure_retry_delay: the delay in seconds to try again starting a build
        :param max_cluster_fails: the maximum number of times a cluster can fail before being
                                  ignored
        :param goarch: dict, keys are platform, values are go language platform names
        """
        super(OrchestrateBuildPlugin, self).__init__(tasker, workflow)
        self.platforms = get_platforms(self.workflow)

        self.build_kwargs = build_kwargs
        self.osbs_client_config_fallback = osbs_client_config
        self.config_kwargs = config_kwargs or {}

        self.adjust_build_kwargs()
        self.validate_arrangement_version()
        self.adjust_config_kwargs()
        self.reactor_config = get_config(self.workflow)

        self.find_cluster_retry_delay = find_cluster_retry_delay
        self.failure_retry_delay = failure_retry_delay
        self.max_cluster_fails = max_cluster_fails
        self.koji_upload_dir = self.get_koji_upload_dir()
        self.fs_task_id = self.get_fs_task_id()
        self.release = self.get_release()

        self.plat_des_fallback = []
        for plat, architecture in (goarch or {}).items():
            plat_dic = {'platform': plat,
                        'architecture': architecture}
            self.plat_des_fallback.append(plat_dic)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }

        if worker_build_image:
            self.log.warning('worker_build_image is deprecated')

        self.worker_builds = []
        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        self.build_image_digests = {}  # by platform
        self._openshift_session = None
        self.build_image_override = get_build_image_override(workflow, {})
        self.platform_descriptors = get_platform_descriptors(self.workflow, self.plat_des_fallback)
    def test_odcs_config(self, tmpdir, default):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123]
                   - name: beta
                     keys: [R123, B456]
                   - name: unsigned
                     keys: []
                   default_signing_intent: {default}
                """.format(default=default)))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        odcs_config = get_config(workflow).get_odcs_config()

        assert odcs_config.default_signing_intent == default

        assert odcs_config.signing_intents == [
            {
                'name': 'unsigned',
                'keys': [],
                'restrictiveness': 0
            },
            {
                'name': 'beta',
                'keys': ['R123', 'B456'],
                'restrictiveness': 1
            },
            {
                'name': 'release',
                'keys': ['R123'],
                'restrictiveness': 2
            },
        ]

        with pytest.raises(ValueError):
            odcs_config.get_signing_intent_by_name('missing')

        assert odcs_config.get_signing_intent_by_keys(['R123'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123', 'B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456', 'R123'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456 R123')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys([])['name'] == 'unsigned'
        assert odcs_config.get_signing_intent_by_keys('')['name'] == 'unsigned'

        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['missing'])
    def _resolve_compose(self):
        odcs_config = get_config(self.workflow).get_odcs_config()
        odcs_client = get_odcs_session(self.workflow, self.odcs_fallback)
        self.read_configs_general()

        modules = self.data.get('modules', [])

        if not modules:
            raise RuntimeError('"compose" config has no modules, a module is required for Flatpaks')

        source_spec = modules[0]
        if len(modules) > 1:
            self.log.info("compose config contains multiple modules,"
                          "using first module %s", source_spec)

        module = ModuleSpec.from_str(source_spec)
        self.log.info("Resolving module compose for name=%s, stream=%s, version=%s",
                      module.name, module.stream, module.version)

        noprofile_spec = module.to_str(include_profile=False)

        if self.compose_ids:
            if len(self.compose_ids) > 1:
                self.log.info("Multiple compose_ids, using first compose %d", self.compose_ids[0])
            self.compose_id = self.compose_ids[0]

        if self.signing_intent_name is not None:
            signing_intent_name = self.signing_intent_name
        else:
            signing_intent_name = self.data.get('signing_intent',
                                                odcs_config.default_signing_intent)
        signing_intent = odcs_config.get_signing_intent_by_name(signing_intent_name)

        if self.compose_id is None:
            arches = sorted(get_platforms(self.workflow))
            self.compose_id = odcs_client.start_compose(source_type='module',
                                                        source=noprofile_spec,
                                                        sigkeys=signing_intent['keys'],
                                                        arches=arches)['id']

        compose_info = odcs_client.wait_for_compose(self.compose_id)
        if compose_info['state_name'] != "done":
            raise RuntimeError("Compose cannot be retrieved, state='%s'" %
                               compose_info['state_name'])

        compose_source = compose_info['source']
        self.log.info("Resolved list of modules: %s", compose_source)

        resolved_modules = self._resolve_modules(compose_source)
        base_module = resolved_modules[module.name]
        assert base_module.stream == module.stream
        if module.version is not None:
            assert base_module.version == module.version

        return ComposeInfo(source_spec=source_spec,
                           compose_id=self.compose_id,
                           base_module=base_module,
                           modules=resolved_modules,
                           repo_url=compose_info['result_repo'] + '/$basearch/os/',
                           signing_intent=signing_intent_name,
                           signing_intent_overridden=self.signing_intent_name is not None)
    def test_odcs_config(self, tmpdir, default):
        filename = str(tmpdir.join('config.yaml'))
        with open(filename, 'w') as fp:
            fp.write(dedent("""\
                version: 1
                odcs:
                   signing_intents:
                   - name: release
                     keys: [R123, R234]
                   - name: beta
                     keys: [R123, B456, B457]
                   - name: unsigned
                     keys: []
                   default_signing_intent: {default}
                   api_url: http://odcs.example.com
                   auth:
                       ssl_certs_dir: /var/run/secrets/atomic-reactor/odcssecret
                """.format(default=default)))

        tasker, workflow = self.prepare()
        plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))
        assert plugin.run() is None

        odcs_config = get_config(workflow).get_odcs_config()

        assert odcs_config.default_signing_intent == default

        unsigned_intent = {
            'name': 'unsigned',
            'keys': [],
            'restrictiveness': 0
        }
        beta_intent = {
            'name': 'beta',
            'keys': ['R123', 'B456', 'B457'],
            'restrictiveness': 1
        }
        release_intent = {
            'name': 'release',
            'keys': ['R123', 'R234'],
            'restrictiveness': 2
        }
        assert odcs_config.signing_intents == [
            unsigned_intent, beta_intent, release_intent
        ]
        assert odcs_config.get_signing_intent_by_name('release') == release_intent
        assert odcs_config.get_signing_intent_by_name('beta') == beta_intent
        assert odcs_config.get_signing_intent_by_name('unsigned') == unsigned_intent

        with pytest.raises(ValueError):
            odcs_config.get_signing_intent_by_name('missing')

        assert odcs_config.get_signing_intent_by_keys(['R123', 'R234'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123 R234')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123'])['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys('R123')['name'] == 'release'
        assert odcs_config.get_signing_intent_by_keys(['R123', 'B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456', 'R123'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456 R123')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('R123 B456 ')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys(['B456'])['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys('B456')['name'] == 'beta'
        assert odcs_config.get_signing_intent_by_keys([])['name'] == 'unsigned'
        assert odcs_config.get_signing_intent_by_keys('')['name'] == 'unsigned'

        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['missing'])
        with pytest.raises(ValueError):
            assert odcs_config.get_signing_intent_by_keys(['R123', 'R234', 'B457'])
    def _resolve_compose(self):
        odcs_config = get_config(self.workflow).get_odcs_config()
        odcs_client = get_odcs_session(self.workflow, self.odcs_fallback)
        self.read_configs_general()

        modules = self.data.get('modules', [])

        if not modules:
            raise RuntimeError('"compose" config has no modules, a module is required for Flatpaks')

        source_spec = modules[0]
        if len(modules) > 1:
            self.log.info("compose config contains multiple modules,"
                          "using first module %s", source_spec)

        module = ModuleSpec.from_str(source_spec)
        self.log.info("Resolving module compose for name=%s, stream=%s, version=%s",
                      module.name, module.stream, module.version)

        noprofile_spec = module.to_str(include_profile=False)

        if self.compose_ids:
            if len(self.compose_ids) > 1:
                self.log.info("Multiple compose_ids, using first compose %d", self.compose_ids[0])
            self.compose_id = self.compose_ids[0]

        if self.signing_intent_name is not None:
            signing_intent_name = self.signing_intent_name
        else:
            signing_intent_name = self.data.get('signing_intent',
                                                odcs_config.default_signing_intent)
        signing_intent = odcs_config.get_signing_intent_by_name(signing_intent_name)

        if self.compose_id is None:
            arches = sorted(get_platforms(self.workflow))
            self.compose_id = odcs_client.start_compose(source_type='module',
                                                        source=noprofile_spec,
                                                        sigkeys=signing_intent['keys'],
                                                        arches=arches)['id']

        compose_info = odcs_client.wait_for_compose(self.compose_id)
        if compose_info['state_name'] != "done":
            raise RuntimeError("Compose cannot be retrieved, state='%s'" %
                               compose_info['state_name'])

        compose_source = compose_info['source']
        self.log.info("Resolved list of modules: %s", compose_source)

        resolved_modules = self._resolve_modules(compose_source)
        base_module = resolved_modules[module.name]
        assert base_module.stream == module.stream
        if module.version is not None:
            assert base_module.version == module.version

        return ComposeInfo(source_spec=source_spec,
                           compose_id=self.compose_id,
                           base_module=base_module,
                           modules=resolved_modules,
                           repo_url=compose_info['result_repo'] + '/$basearch/os/',
                           signing_intent=signing_intent_name,
                           signing_intent_overridden=self.signing_intent_name is not None)