Example #1
0
 def __init__(self, playbook_path):
     """
     Remember the playbook location and set up fresh Build and
     ImageMetadata objects; the build references the same metadata.

     :param playbook_path: str, path to playbook
     """
     self.metadata = ImageMetadata()
     self.build = Build()
     self.build.metadata = self.metadata
     self.playbook_path = playbook_path
Example #2
0
def build(target_image):
    """
    Create a Build preconfigured for tests: debug on, basic playbook,
    buildah builder, state NEW.

    :param target_image: str, name of the image to produce
    :return: Build instance
    """
    b = Build()
    b.debug = True
    b.playbook_path = basic_playbook_path
    b.base_image = base_image
    b.target_image = target_image
    b.metadata = ImageMetadata()
    b.state = BuildState.NEW
    b.builder_name = "buildah"  # test with all builders
    return b
Example #3
0
def build_inside_openshift(app):
    """
    This is expected to run inside an openshift pod spawned via custom build

    :param app: instance of Application
    """
    playbook_path, base_image = okd_get_playbook_base()

    # absolute paths could point anywhere on the builder host;
    # require a path relative to the cloned repo
    if playbook_path.startswith("/"):
        raise RuntimeError(
            "The path to playbook needs to be relative within the git repo.")

    uri, ref, target_image = okd_load_metadata()

    tmp = tempfile.mkdtemp(prefix="ab-okd")

    try:
        git_clone_to_path(uri, tmp, ref=ref)

        playbook_path = os.path.abspath(os.path.join(tmp, playbook_path))
        # guard against "../" escapes; a plain startswith(tmp) would also
        # accept sibling dirs sharing the prefix (e.g. "<tmp>-evil"), so
        # compare against the directory prefix including the separator
        if not playbook_path.startswith(tmp + os.sep):
            raise RuntimeError(
                "The path to playbook points outside of the git repo, this is not allowed."
            )

        build = Build()
        build.metadata = ImageMetadata()  # TODO: needs to be figured out
        build.playbook_path = playbook_path
        build.base_image = base_image
        build.target_image = target_image
        build.builder_name = "buildah"
        build.cache_tasks = False  # we have local storage in pod, so this doesn't make any sense
        app.build(build)

    finally:
        # always drop the clone, even when the build fails
        shutil.rmtree(tmp)
Example #4
0
class PbVarsParser:
    """
    Extract ansible-bender configuration from the vars section of a playbook.

    The data lives under /vars/ansible_bender of the playbook's first play;
    it is expanded by ansible itself (so jinja templating works), validated
    against PLAYBOOK_SCHEMA and applied to Build and ImageMetadata objects.
    """

    def __init__(self, playbook_path):
        """
        :param playbook_path: str, path to playbook
        """
        self.playbook_path = playbook_path
        self.build = Build()
        self.metadata = ImageMetadata()
        # the build references the same metadata instance we hand out
        self.build.metadata = self.metadata

    def _check_selinux_iz_gud(self):
        """
        This is a workaround for a weird behavior of ansible: if selinux is
        in the enforcing mode and python3-libselinux is not installed, ansible freezes

        https://bugzilla.redhat.com/show_bug.cgi?id=1696706

        :raises RuntimeError: selinux is active but the python bindings are missing
        """
        try:
            enforcing_status = Path("/sys/fs/selinux/enforce").read_text()
        except FileNotFoundError:
            logger.debug(
                "this system is not using selinux, /sys/fs/selinux/enforce is not present"
            )
            return
        logger.debug(f"selinux enforce status = {enforcing_status}")
        # it can be enforcing or not, selinux python module needs to be present
        try:
            importlib.import_module("selinux")
        except ModuleNotFoundError:
            raise RuntimeError(
                "\nThis system is using selinux(8) and selinux python module is not installed. "
                "There is a known issue in ansible that it freezes in this setup:\n"
                "  https://bugzilla.redhat.com/show_bug.cgi?id=1696706\n"
                "Please install libselinux python bindings (on Fedora the package name is python3-libselinux)."
            )

    def expand_pb_vars(self):
        """
        populate vars from a playbook, defined in vars section

        A throwaway one-task playbook is generated and executed with ansible
        so the jinja templating inside the vars is expanded by ansible itself.

        :return: dict with the content of ansible_bender var
        """
        self._check_selinux_iz_gud()
        with open(self.playbook_path) as fd:
            plays = yaml.safe_load(fd)

        # data is taken only from the first play; just warn about the rest
        for play in plays[1:]:
            bender_vars = graceful_get(play, "vars", "ansible_bender")
            if bender_vars:
                logger.warning(
                    "Variables are loaded only from the first play.")

        try:
            # we care only about the first play, we don't want to merge dicts
            d = plays[0]
        except IndexError:
            raise RuntimeError(
                "Invalid playbook, can't access the first document.")

        bender_vars = graceful_get(d, "vars", "ansible_bender")
        if not bender_vars:
            logger.info("no bender data found in the playbook")
            return {}

        tmp = tempfile.mkdtemp(prefix="ab")
        json_data_path = os.path.join(tmp, "j.json")

        # we cannot use "vars" variable because the variables are not expanded in there
        pb_vars = copy.deepcopy(d["vars"])
        while True:
            # just in case the variable is already defined
            timestamp = datetime.datetime.now().strftime(
                TIMESTAMP_FORMAT_TOGETHER)
            ab_vars_key = f"ab_vars_{timestamp}"
            if ab_vars_key not in pb_vars:
                logger.debug("ab vars key = %s", ab_vars_key)
                pb_vars[ab_vars_key] = d["vars"]
                break
        # a jinja reference to the key makes ansible expand the whole vars dict
        jinja_pb_vars_key = '{{ %s }}' % ab_vars_key
        pb = {
            "name": "Let Ansible expand variables",
            "hosts": "localhost",
            "vars": pb_vars,
            "vars_files": d.get("vars_files", []),
            "gather_facts": False,
            "tasks": [
                {"debug": {"msg": jinja_pb_vars_key}},
                {"copy": {"dest": json_data_path,
                          "content": jinja_pb_vars_key}},
            ],
        }

        i_path = os.path.join(tmp, "i")
        with open(i_path, "w") as fd:
            fd.write("localhost ansible_connection=local")

        # json is easier to parse than yaml
        tmp_pb_path = os.path.join(tmp, "p.json")
        with open(tmp_pb_path, "w") as fd:
            json.dump([pb], fd)

        # NOTE(review): the generated playbook is symlinked next to the
        # original one — presumably so relative paths (e.g. vars_files)
        # keep resolving; confirm
        playbook_base = os.path.basename(self.playbook_path).split(".", 1)[0]
        symlink_name = f".{playbook_base}-{timestamp}-{random_str()}.yaml"
        playbook_dir = os.path.dirname(self.playbook_path)
        symlink_path = os.path.join(playbook_dir, symlink_name)
        os.symlink(tmp_pb_path, symlink_path)

        # yeah, ansible is not very smart for connection=local
        args = ["-e", f"ansible_python_interpreter={sys.executable}"]

        try:
            run_playbook(symlink_path,
                         i_path,
                         None,
                         connection="local",
                         try_unshare=False,
                         provide_output=False,
                         log_stderr=True,
                         ansible_args=args)

            with open(json_data_path) as fd:
                return json.load(fd)["ansible_bender"]
        finally:
            # remove the symlink and all generated temp files
            os.unlink(symlink_path)
            shutil.rmtree(tmp)

    def process_pb_vars(self, bender_data):
        """
        accept variables from the playbook and update the Build and ImageMetadata objects with them

        :param bender_data: dict with the content of ansible_bender vars
        :raises ABValidationError: when the data does not match PLAYBOOK_SCHEMA
        """
        if not bender_data:
            return
        try:
            # validation to error out unknown keys in /vars/ansible_bender
            jsonschema.validate(bender_data, PLAYBOOK_SCHEMA)
        except jsonschema.ValidationError as validation_error:
            if validation_error.validator == "type":
                # error is due to invalid value datatype
                # path entries may be ints (list indices) — stringify them,
                # otherwise "/".join() raises TypeError
                path = "/" + "/".join(
                    str(p) for p in validation_error.path)
                expected_types = validation_error.validator_value
                if isinstance(validation_error.validator_value, list):
                    expected_types = ", ".join(
                        validation_error.validator_value)
                message = f"variable {path} is set to {validation_error.instance}" \
                          f", which is not of type {expected_types}"
                raise ABValidationError(message) from validation_error
            else:
                # error is due to absence of a required key, unknown keys playbook or any other kind
                raise ABValidationError(
                    validation_error.message) from validation_error

        self.metadata.update_from_configuration(
            bender_data.get("target_image", {}))
        self.build.update_from_configuration(bender_data)

    def get_build_and_metadata(self):
        """
        extra vars from the selected playbook

        :return: Build(), ImageMetadata()
        """
        bender_data = self.expand_pb_vars()

        self.process_pb_vars(bender_data)

        return self.build, self.metadata
class PbVarsParser:
    """
    Extract ansible-bender configuration from the vars section of a playbook.

    The data is read from /vars/ansible_bender of the playbook's first play
    and applied to the Build and ImageMetadata instances returned by
    get_build_and_metadata().
    """

    def __init__(self, playbook_path):
        """
        :param playbook_path: str, path to playbook
        """
        self.playbook_path = playbook_path
        self.build = Build()
        self.metadata = ImageMetadata()
        # the build references the same metadata instance we hand out
        self.build.metadata = self.metadata

    def expand_pb_vars(self):
        """
        populate vars from a playbook, defined in vars section

        A throwaway one-task playbook is generated and executed with ansible
        so the jinja templating inside the vars is expanded by ansible itself.

        :return: dict with the content of ansible_bender var
        """
        with open(self.playbook_path) as fd:
            plays = yaml.safe_load(fd)

        # data is taken only from the first play; just warn about the rest
        for play in plays[1:]:
            bender_vars = graceful_get(play, "vars", "ansible_bender")
            if bender_vars:
                logger.warning(
                    "Variables are loaded only from the first play.")

        try:
            # we care only about the first play, we don't want to merge dicts
            d = plays[0]
        except IndexError:
            raise RuntimeError(
                "Invalid playbook, can't access the first document.")

        bender_vars = graceful_get(d, "vars", "ansible_bender")
        if not bender_vars:
            logger.info("no bender data found in the playbook")
            return {}

        tmp = tempfile.mkdtemp(prefix="ab")
        json_data_path = os.path.join(tmp, "j.json")

        # we cannot use "vars" variable because the variables are not expanded in there
        pb_vars = copy.deepcopy(d["vars"])
        while True:
            # just in case the variable is already defined
            timestamp = datetime.datetime.now().strftime(
                TIMESTAMP_FORMAT_TOGETHER)
            ab_vars_key = f"ab_vars_{timestamp}"
            if ab_vars_key not in pb_vars:
                logger.debug("ab vars key = %s", ab_vars_key)
                pb_vars[ab_vars_key] = d["vars"]
                break
        # a jinja reference to the key makes ansible expand the whole vars dict
        jinja_pb_vars_key = '{{ %s }}' % ab_vars_key
        pb = {
            "name":
            "Let Ansible expand variables",
            "hosts":
            "localhost",
            "vars":
            pb_vars,
            "vars_files":
            d.get("vars_files", []),
            "gather_facts":
            False,
            "tasks": [{
                "debug": {
                    "msg": jinja_pb_vars_key
                }
            }, {
                "copy": {
                    "dest": json_data_path,
                    "content": jinja_pb_vars_key
                }
            }]
        }

        i_path = os.path.join(tmp, "i")
        with open(i_path, "w") as fd:
            fd.write("localhost ansible_connection=local")

        # json is easier to parse than yaml
        tmp_pb_path = os.path.join(tmp, "p.json")
        with open(tmp_pb_path, "w") as fd:
            json.dump([pb], fd)

        # NOTE(review): the generated playbook is symlinked next to the
        # original one — presumably so relative paths (e.g. vars_files)
        # keep resolving; confirm
        playbook_base = os.path.basename(self.playbook_path).split(".", 1)[0]
        symlink_name = f".{playbook_base}-{timestamp}-{random_str()}.yaml"
        playbook_dir = os.path.dirname(self.playbook_path)
        symlink_path = os.path.join(playbook_dir, symlink_name)
        os.symlink(tmp_pb_path, symlink_path)

        # yeah, ansible is not very smart for connection=local
        args = ["-e", f"ansible_python_interpreter={sys.executable}"]

        try:
            run_playbook(symlink_path,
                         i_path,
                         None,
                         connection="local",
                         try_unshare=False,
                         provide_output=False,
                         log_stderr=True,
                         ansible_args=args)

            with open(json_data_path) as fd:
                return json.load(fd)["ansible_bender"]
        finally:
            # remove the symlink and all generated temp files
            os.unlink(symlink_path)
            shutil.rmtree(tmp)

    def process_pb_vars(self, bender_data):
        """
        accept variables from the playbook and update the Build and ImageMetadata objects with them

        :param bender_data: dict with the content of ansible_bender vars
        :return:
        """
        if not bender_data:
            return
        self.metadata.update_from_configuration(
            bender_data.get("target_image", {}))
        self.build.update_from_configuration(bender_data)

    def get_build_and_metadata(self):
        """
        extra vars from the selected playbook

        :return: Build(), ImageMetadata()
        """
        bender_data = self.expand_pb_vars()

        self.process_pb_vars(bender_data)

        return self.build, self.metadata