Example #1
0
    def __init__(self, build_path):
        """Load and schema-validate the repo container configuration.

        :param build_path: directory that may contain REPO_CONTAINER_CONFIG;
                           when the file is absent, all settings fall back to
                           their empty defaults
        :raises Exception: re-raised after logging when the YAML fails to
                           load or validate
        """
        self.data = {}
        self.file_path = os.path.join(build_path, REPO_CONTAINER_CONFIG)
        if os.path.exists(self.file_path):
            try:
                # read file and validate against schema; an empty-but-valid
                # file yields None, hence the `or {}` guard
                self.data = read_yaml_from_file_path(
                    self.file_path, 'schemas/container.json', 'osbs'
                ) or {}
            except Exception:
                logger.exception(
                    "Failed to load and validate source config YAML from %s",
                    self.file_path
                )
                raise

        self.release_env_var = self.data.get('set_release_env')
        self.autorebuild = self.data.get('autorebuild') or {}
        self.flatpak = self.data.get('flatpak')
        self.compose = self.data.get('compose')
        self.go = self.data.get('go') or {}
        self.image_build_method = meth = self.data.get('image_build_method')
        self.inherit = self.compose.get('inherit', False) if self.compose else False
        if self.compose:
            # removing inherit from compose so it can be evaluated as a bool in order
            # to decide whether any ODCS composes will be created
            self.compose.pop('inherit', None)
        # Explicit raise instead of `assert`: asserts are stripped when Python
        # runs with -O, which would silently skip this sanity check. Raising
        # AssertionError keeps the exception type callers may already expect.
        if meth is not None and meth not in CONTAINER_BUILD_METHODS:
            raise AssertionError(
                "unknown build method '{}' specified in {}; also, schema validated it."
                .format(meth, REPO_CONTAINER_CONFIG)
            )
        self.remote_source = self.data.get('remote_source')
        self.remote_sources = self.data.get('remote_sources')
        self.operator_manifests = self.data.get('operator_manifests')
Example #2
0
    def __init__(self,
                 config_path=None,
                 env_name=REACTOR_CONFIG_ENV_NAME,
                 raw_config=None):
        """Load the reactor config, preferring kwarg over env var over file.

        Falls back to a deep copy of DEFAULT_CONFIG when no source is
        available, then enforces that the config declares version 1.
        """
        self.conf = deepcopy(self.DEFAULT_CONFIG)
        env_config = os.environ.get(env_name, None)

        if raw_config:
            logger.info("reading config from raw_config kwarg")
            self.conf = deepcopy(raw_config)
        elif env_config:
            logger.info("reading config from %s env variable", env_name)
            self.conf = read_yaml(env_config, 'schemas/config.json')
        elif config_path and os.path.exists(config_path):
            logger.info("reading config from %s", config_path)
            self.conf = read_yaml_from_file_path(config_path, 'schemas/config.json')
        else:
            logger.info("using default config: %s", self.DEFAULT_CONFIG)

        # only config version 1 is understood by this code
        version = self.conf[ReactorConfigKeys.VERSION_KEY]
        if version != 1:
            raise ValueError("version %r unknown" % version)

        logger.info("reading config content %s", self.conf)
Example #3
0
    def __init__(self, build_path):
        """Parse the repo container configuration file, if one exists.

        Missing file means every setting takes its empty default. A file
        that fails schema validation is logged and the error re-raised.
        """
        self.data = {}
        self.file_path = os.path.join(build_path, REPO_CONTAINER_CONFIG)
        if os.path.exists(self.file_path):
            try:
                # validate the YAML against the container schema on load;
                # an empty-but-valid file comes back as None
                parsed = read_yaml_from_file_path(
                    self.file_path, 'schemas/container.json', 'osbs')
                self.data = parsed or {}
            except Exception:
                logger.exception(
                    "Failed to load and validate source config YAML from %s",
                    self.file_path)
                raise

        get = self.data.get
        self.release_env_var = get('set_release_env')
        self.flatpak = get('flatpak')
        self.compose = get('compose')
        self.go = get('go') or {}
        if self.compose:
            self.inherit = self.compose.get('inherit', False)
            # drop 'inherit' from compose so the dict's truthiness decides
            # whether any ODCS composes will be created
            self.compose.pop('inherit', None)
        else:
            self.inherit = False
        self.remote_source = get('remote_source')
        self.remote_sources = get('remote_sources')
        self.operator_manifests = get('operator_manifests')

        platforms = get('platforms') or {'not': [], 'only': []}
        platforms['not'] = make_list(platforms.get('not', []))
        platforms['only'] = make_list(platforms.get('only', []))
        self.platforms = platforms
    def __init__(self, build_path):
        """Load and schema-validate the repo container configuration.

        :param build_path: directory that may contain REPO_CONTAINER_CONFIG;
                           when the file is absent, settings fall back to
                           their empty defaults
        :raises Exception: re-raised after logging when loading/validation fails
        """
        self.data = {}
        self.file_path = os.path.join(build_path, REPO_CONTAINER_CONFIG)
        if os.path.exists(self.file_path):
            try:
                # read file and validate against schema; an empty-but-valid
                # file yields None, hence the `or {}` guard
                self.data = read_yaml_from_file_path(
                    self.file_path, 'schemas/container.json'
                ) or {}
            except Exception:
                logger.exception(
                    "Failed to load and validate source config YAML from %s",
                    self.file_path
                )
                raise

        self.autorebuild = self.data.get('autorebuild') or {}
        self.flatpak = self.data.get('flatpak')
        self.compose = self.data.get('compose')
        self.go = self.data.get('go') or {}
        self.image_build_method = meth = self.data.get('image_build_method')
        self.inherit = self.compose.get('inherit', False) if self.compose else False
        if self.compose:
            # removing inherit from compose so it can be evaluated as a bool in order
            # to decide whether any ODCS composes will be created
            self.compose.pop('inherit', None)
        # Explicit raise instead of `assert`: asserts are stripped when Python
        # runs with -O, which would silently skip this sanity check. Raising
        # AssertionError keeps the exception type callers may already expect.
        if meth is not None and meth not in CONTAINER_BUILD_METHODS:
            raise AssertionError(
                "unknown build method '{}' specified in {}; also, schema validated it."
                .format(meth, REPO_CONTAINER_CONFIG)
            )
    def read_configs(self):
        """Gather ODCS, compose and content-sets configuration for the build.

        Raises SkipResolveComposesPlugin when there is nothing to resolve:
        no ODCS config, or neither a "compose" section nor compose_ids.
        """
        self.odcs_config = get_config(self.workflow).get_odcs_config()
        if not self.odcs_config:
            raise SkipResolveComposesPlugin('ODCS config not found')

        data = self.workflow.source.config.compose
        if not data and not self.all_compose_ids:
            raise SkipResolveComposesPlugin(
                '"compose" config not set and compose_ids not given')

        workdir = self.workflow.source.get_build_file_path()[1]
        content_sets_path = os.path.join(workdir, REPO_CONTENT_SETS_CONFIG)
        pulp_data = None
        if os.path.exists(content_sets_path):
            # an empty-but-valid content_sets file parses to None
            pulp_data = read_yaml_from_file_path(
                content_sets_path, 'schemas/content_sets.json') or {}

        platforms = get_platforms(self.workflow)
        if platforms:
            # sorted to keep predictable for tests
            platforms = sorted(platforms)

        self.compose_config = ComposeConfig(data, pulp_data, self.odcs_config,
                                            arches=platforms)
        if self.compose_config.has_complete_repos():
            self.has_complete_repos = True
    def read_url_requests(self):
        """Read and validate the fetch-artifacts-url configuration.

        :return: list of url request entries; empty list when the file is
                 absent or contains no entries
        """
        file_path = os.path.join(self.workdir, self.URL_REQUESTS_FILENAME)
        if not os.path.exists(file_path):
            self.log.debug('%s not found', self.URL_REQUESTS_FILENAME)
            return []

        # `or []` guards against an empty-but-valid YAML file, for which
        # read_yaml_from_file_path returns None (other call sites in this
        # codebase apply the same guard)
        return util.read_yaml_from_file_path(
            file_path, 'schemas/fetch-artifacts-url.json') or []
Example #7
0
    def read_url_requests(self):
        """Return validated url requests, or [] when none are configured."""
        path = os.path.join(self.workdir, self.URL_REQUESTS_FILENAME)
        if os.path.exists(path):
            # empty-but-valid YAML parses to None; normalize to a list
            requests = util.read_yaml_from_file_path(
                path, 'schemas/fetch-artifacts-url.json')
            return requests or []

        self.log.debug('%s not found', self.URL_REQUESTS_FILENAME)
        return []
    def read_nvr_requests(self):
        """Read and validate the fetch-artifacts-nvr configuration.

        :return: list of NvrRequest objects; empty list when the file is
                 absent or contains no entries
        """
        file_path = os.path.join(self.workdir, self.NVR_REQUESTS_FILENAME)
        if not os.path.exists(file_path):
            self.log.debug('%s not found', self.NVR_REQUESTS_FILENAME)
            return []

        # `or []` keeps an empty-but-valid YAML file (parsed as None) from
        # crashing the list comprehension below with a TypeError
        nvr_requests = util.read_yaml_from_file_path(
            file_path, 'schemas/fetch-artifacts-nvr.json') or []
        return [NvrRequest(**nvr_request) for nvr_request in nvr_requests]
Example #9
0
    def read_nvr_requests(self):
        """Return NvrRequest objects parsed from the nvr-requests file."""
        path = os.path.join(self.workdir, self.NVR_REQUESTS_FILENAME)
        if not os.path.exists(path):
            self.log.debug('%s not found', self.NVR_REQUESTS_FILENAME)
            return []

        # empty-but-valid YAML parses to None; normalize before building
        raw_requests = util.read_yaml_from_file_path(
            path, 'schemas/fetch-artifacts-nvr.json') or []
        return [NvrRequest(**req) for req in raw_requests]
Example #10
0
def test_read_yaml_file_or_yaml(tmpdir, from_file, config):
    """Both the file-path and raw-string YAML readers must agree with yaml.safe_load."""
    expected = yaml.safe_load(config)

    if not from_file:
        output = read_yaml(config, 'schemas/config.json')
    else:
        config_path = os.path.join(str(tmpdir), 'config.yaml')
        with open(config_path, 'w') as fp:
            fp.write(config)
        output = read_yaml_from_file_path(config_path, 'schemas/config.json')

    assert output == expected
Example #11
0
    def run(self):
        """
        Run the plugin

        Parse and validate config.
        Store in workflow workspace for later retrieval.
        """
        # Config precedence: REACTOR_CONFIG env-var content wins over the
        # on-disk file; both paths validate against the same JSON schema.
        if self.reactor_config_map:
            self.log.info("reading config from REACTOR_CONFIG env variable")
            conf = read_yaml(self.reactor_config_map, 'schemas/config.json')
        else:
            config_filename = os.path.join(self.config_path, self.basename)
            self.log.info("reading config from %s", config_filename)
            conf = read_yaml_from_file_path(config_filename,
                                            'schemas/config.json')
        # Stash the parsed config in this plugin's workspace so other
        # plugins can retrieve it under WORKSPACE_CONF_KEY.
        reactor_conf = ReactorConfig(conf)
        workspace = self.workflow.plugin_workspace.setdefault(self.key, {})
        workspace[WORKSPACE_CONF_KEY] = reactor_conf

        self.log.info("reading config content %s", reactor_conf.conf)

        # need to stash this on the workflow for access in a place that can't import this module
        buildstep_aliases = get_buildstep_alias(self.workflow)
        default_image_build_method = get_default_image_build_method(
            self.workflow)
        source_image_build_method = self.workflow.builder.source.config.image_build_method

        # Resolve both the source-config and the default build method through
        # the alias table (each mapped only if an alias entry exists).
        if source_image_build_method in buildstep_aliases:
            source_image_build_method = buildstep_aliases[
                source_image_build_method]
        if default_image_build_method in buildstep_aliases:
            default_image_build_method = buildstep_aliases[
                default_image_build_method]

        # buildah is explicitly rejected: not fully implemented yet.
        if (source_image_build_method == CONTAINER_BUILDAH_BUILD_METHOD or
                default_image_build_method == CONTAINER_BUILDAH_BUILD_METHOD):
            raise NotImplementedError(
                '{} method not yet fully implemented'.format(
                    CONTAINER_BUILDAH_BUILD_METHOD))

        # Write the resolved methods back; the tasker uses the source-config
        # method when set, otherwise the default.
        self.workflow.builder.source.config.image_build_method = source_image_build_method
        self.workflow.default_image_build_method = default_image_build_method
        self.workflow.builder.tasker.build_method = (
            source_image_build_method or default_image_build_method)

        # set source registry and organization
        if self.workflow.builder.dockerfile_images:
            source_registry_docker_uri = get_source_registry(
                self.workflow)['uri'].docker_uri
            organization = get_registries_organization(self.workflow)
            self.workflow.builder.dockerfile_images.set_source_registry(
                source_registry_docker_uri, organization)
Example #12
0
    def from_cli_args(cls, args: dict):
        """Create a TaskParams instance from CLI arguments.

        User params must arrive either inline ("user_params") or as a file
        path ("user_params_file"); otherwise a ValueError is raised.
        """
        args = cls._drop_known_unset_args(args)
        raw_params = args.pop("user_params", None)
        params_path = args.pop("user_params_file", None)

        if raw_params:
            user_params = util.read_yaml(raw_params, cls.user_params_schema)
        elif params_path:
            user_params = util.read_yaml_from_file_path(params_path,
                                                        cls.user_params_schema)
        else:
            raise ValueError(
                "Did not receive user params. User params are currently required."
            )

        return cls(**args, user_params=user_params)
Example #13
0
    def __init__(self, build_path):
        """Load and schema-validate the repo container configuration.

        :param build_path: directory that may contain REPO_CONTAINER_CONFIG;
                           an absent file leaves all settings at their
                           empty defaults
        :raises Exception: re-raised after logging when loading/validation fails
        """
        self.data = {}
        self.file_path = os.path.join(build_path, REPO_CONTAINER_CONFIG)
        if os.path.exists(self.file_path):
            try:
                # read file and validate against schema; an empty-but-valid
                # file yields None, hence the `or {}` guard
                self.data = read_yaml_from_file_path(
                    self.file_path, 'schemas/container.json') or {}
            except Exception:
                logger.exception(
                    "Failed to load and validate source config YAML from %s",
                    self.file_path)
                raise

        self.autorebuild = self.data.get('autorebuild') or {}
        self.flatpak = self.data.get('flatpak')
        self.compose = self.data.get('compose')
        self.image_build_method = meth = self.data.get('image_build_method')
        # Explicit raise instead of `assert`: asserts are stripped when Python
        # runs with -O, which would silently skip this sanity check. Raising
        # AssertionError keeps the exception type callers may already expect.
        if meth is not None and meth not in CONTAINER_BUILD_METHODS:
            raise AssertionError(
                "unknown build method '{}' specified in {}; also, schema validated it."
                .format(meth, REPO_CONTAINER_CONFIG))
    def run(self):
        """
        Run the plugin

        Parse and validate config.
        Store in workflow workspace for later retrieval.
        """
        # env-var config takes precedence over the on-disk file
        if not self.reactor_config_map:
            config_filename = os.path.join(self.config_path, self.basename)
            self.log.info("reading config from %s", config_filename)
            conf = read_yaml_from_file_path(config_filename, 'schemas/config.json')
        else:
            self.log.info("reading config from REACTOR_CONFIG env variable")
            conf = read_yaml(self.reactor_config_map, 'schemas/config.json')

        reactor_conf = ReactorConfig(conf)
        workspace = self.workflow.plugin_workspace.setdefault(self.key, {})
        workspace[WORKSPACE_CONF_KEY] = reactor_conf

        self.log.info("reading config content %s", reactor_conf.conf)

        # need to stash this on the workflow for access in a place that can't import this module
        self.workflow.default_image_build_method = get_default_image_build_method(self.workflow)
    def run(self):
        """
        Run the plugin

        Parse and validate config.
        Store in workflow workspace for later retrieval.
        """
        # env-var config takes precedence over the on-disk file
        if self.reactor_config_map:
            self.log.info("reading config from REACTOR_CONFIG env variable")
            conf = read_yaml(self.reactor_config_map, 'schemas/config.json')
        else:
            path = os.path.join(self.config_path, self.basename)
            self.log.info("reading config from %s", path)
            conf = read_yaml_from_file_path(path, 'schemas/config.json')

        reactor_conf = ReactorConfig(conf)
        self.workflow.plugin_workspace.setdefault(
            self.key, {})[WORKSPACE_CONF_KEY] = reactor_conf

        self.log.info("reading config content %s", reactor_conf.conf)

        # need to stash this on the workflow for access in a place that can't import this module
        self.workflow.default_image_build_method = get_default_image_build_method(
            self.workflow)