def update_from_configuration(self, data):
    """
    Refresh this object with configuration coming from Ansible vars.

    :param data: dict, content of the ansible_bender variable
    """
    self.build_volumes += graceful_get(data, "working_container", "volumes", default=[])
    self.build_user = graceful_get(data, "working_container", "user")
    self.base_image = graceful_get(data, "base_image")
    self.target_image = graceful_get(data, "target_image", "name")
    # these flags keep their current value when absent from the config
    for flag in ("cache_tasks", "layering", "squash"):
        setattr(self, flag, graceful_get(data, flag, default=getattr(self, flag)))
    # plain overrides: a missing key resets the attribute to None
    for key in ("buildah_from_extra_args", "ansible_extra_args", "verbose_layer_names"):
        setattr(self, key, graceful_get(data, key))
def okd_load_metadata():
    """
    Load metadata about the current build from the BUILD environment variable.

    :return: tuple (git uri, git ref, output image name)
    :raises RuntimeError: when any of the three values is missing from BUILD
    """
    raw_build = env_get_or_fail_with(
        "BUILD",
        "BUILD environment variable is not set, are you running in openshift?")
    build_def = json.loads(raw_build)
    git_uri = graceful_get(build_def, "spec", "source", "git", "uri")
    git_ref = graceful_get(build_def, "spec", "source", "git", "ref")
    output_image = graceful_get(build_def, "spec", "output", "to", "name")
    if not (git_uri and git_ref and output_image):
        raise RuntimeError("Not all build parameters seem to be set, halting.")
    return git_uri, git_ref, output_image
def get_docker_image_id(container_image):
    """
    Ask the container runtime for the image ID of container_image.

    :return: str image ID, or None when inspect returned no entries
    """
    metadata = inspect_resource("image", container_image)
    # docker inspect yields a list of records; an empty list means "not found"
    if len(metadata) == 0:
        return None
    return graceful_get(metadata[0], "Id")
def from_json(cls, j):
    """
    Return a Build instance reconstructed from the provided json dict.

    :param j: dict, serialized Build
    :return: Build instance
    """
    def _parse_time(value):
        # timestamps are serialized as strings; falsy means "not set"
        if value:
            return datetime.datetime.strptime(value, TIMESTAMP_FORMAT)
        return None

    build = cls()
    build.build_id = j["build_id"]
    build.playbook_path = j.get("playbook_path", None)
    build.build_volumes = j["build_volumes"]
    build.build_user = graceful_get(j, "build_user")
    build.build_entrypoint = graceful_get(j, "build_entrypoint")
    build.metadata = ImageMetadata.from_json(j["metadata"])
    build.state = BuildState(j["state"])
    build.build_start_time = _parse_time(j["build_start_time"])
    build.build_finished_time = _parse_time(j["build_finished_time"])
    build.base_image = j["base_image"]
    build.target_image = j["target_image"]
    build.builder_name = j["builder_name"]
    build.layers = [Layer.from_json(layer) for layer in j["layers"]]
    build.final_layer_id = j.get("final_layer_id", None)
    build.layer_index = {
        layer_id: Layer.from_json(layer_data)
        for layer_id, layer_data in j["layer_index"].items()
    }
    build.build_container = j["build_container"]
    build.cache_tasks = j["cache_tasks"]
    build.log_lines = j["log_lines"]
    build.layering = j["layering"]
    build.squash = j.get("squash", False)
    build.debug = j["debug"]
    build.verbose = j["verbose"]
    build.pulled = j["pulled"]
    # optional fields, introduced over time: default when absent
    build.buildah_from_extra_args = j.get("buildah_from_extra_args", None)
    build.ansible_extra_args = j.get("ansible_extra_args", None)
    build.python_interpreter = j.get("python_interpreter", None)
    build.verbose_layer_names = graceful_get(j, "verbose_layer_names", default=False)
    return build
def from_json(cls, j):
    """
    Return a Metadata instance reconstructed from the provided json dict.

    :param j: dict, serialized metadata
    :return: Metadata instance
    """
    metadata = cls()
    # mandatory fields: a missing key raises KeyError
    for attr in ("working_dir", "labels", "env_vars", "cmd",
                 "user", "ports", "volumes"):
        setattr(metadata, attr, j[attr])
    # optional: fall back to an empty mapping when absent
    metadata.annotations = graceful_get(j, ANNOTATIONS_KEY, default={})
    return metadata
def get_buildah_image_id(container_image):
    """
    Ask buildah for the image ID ("FromImageID") of container_image.

    :return: value of "FromImageID" from the inspect data
    """
    image_metadata = inspect_resource("image", container_image)
    return graceful_get(image_metadata, "FromImageID")
def test_graceful_g_w_default():
    """graceful_get returns the default only when the path can't be followed."""
    data = {1: {2: 3}}
    cases = (
        ((3,), "asd"),        # top-level miss -> default
        ((1,), {2: 3}),       # hit: nested dict
        ((1, 2), 3),          # hit: leaf value
        ((1, 2, 4), "asd"),   # path dead-ends at a non-dict -> default
    )
    for path, expected in cases:
        assert graceful_get(data, *path, default="asd") == expected
def test_graceful_g(inp, path, exp):
    """Parametrized: walking *path* through *inp* yields *exp*."""
    result = graceful_get(inp, *path)
    assert result == exp
def expand_pb_vars(self):
    """
    populate vars from a playbook, defined in vars section

    :return: dict with the content of ansible_bender var
    """
    self._check_selinux_iz_gud()
    with open(self.playbook_path) as fd:
        plays = yaml.safe_load(fd)
    # warn when bender config appears in any play other than the first one,
    # since only the first play's vars are actually used
    for play in plays[1:]:
        bender_vars = graceful_get(play, "vars", "ansible_bender")
        if bender_vars:
            logger.warning(
                "Variables are loaded only from the first play.")
    try:
        # we care only about the first play, we don't want to merge dicts
        d = plays[0]
    except IndexError:
        raise RuntimeError(
            "Invalid playbook, can't access the first document.")
    bender_vars = graceful_get(d, "vars", "ansible_bender")
    if not bender_vars:
        logger.info("no bender data found in the playbook")
        return {}
    # scratch dir where the expanded vars will be dumped as JSON
    tmp = tempfile.mkdtemp(prefix="ab")
    json_data_path = os.path.join(tmp, "j.json")
    # we cannot use "vars" variable because the variables are not expanded in there
    pb_vars = copy.deepcopy(d["vars"])
    while True:
        # just in case the variable is already defined
        timestamp = datetime.datetime.now().strftime(
            TIMESTAMP_FORMAT_TOGETHER)
        ab_vars_key = f"ab_vars_{timestamp}"
        if ab_vars_key not in pb_vars:
            logger.debug("ab vars key = %s", ab_vars_key)
            pb_vars[ab_vars_key] = d["vars"]
            break
    jinja_pb_vars_key = '{{ %s }}' % ab_vars_key
    # single-play playbook that makes Ansible itself expand the variables
    # and write the result to json_data_path
    pb = {
        "name": "Let Ansible expand variables",
        "hosts": "localhost",
        "vars": pb_vars,
        "vars_files": d.get("vars_files", []),
        "gather_facts": False,
        "tasks": [{
            "debug": {
                "msg": jinja_pb_vars_key
            }
        }, {
            "copy": {
                "dest": json_data_path,
                "content": jinja_pb_vars_key
            }
        }]
    }
    # minimal inventory pointing at localhost
    i_path = os.path.join(tmp, "i")
    with open(i_path, "w") as fd:
        fd.write("localhost ansible_connection=local")
    # json is easier to parse than yaml
    tmp_pb_path = os.path.join(tmp, "p.json")
    with open(tmp_pb_path, "w") as fd:
        json.dump([pb], fd)
    playbook_base = os.path.basename(self.playbook_path).split(".", 1)[0]
    symlink_name = f".{playbook_base}-{timestamp}-{random_str()}.yaml"
    playbook_dir = os.path.dirname(self.playbook_path)
    symlink_path = os.path.join(playbook_dir, symlink_name)
    # NOTE(review): the temp playbook is symlinked next to the original one,
    # presumably so relative paths (e.g. vars_files) resolve from the same
    # directory — confirm before changing
    os.symlink(tmp_pb_path, symlink_path)
    # yeah, ansible is not very smart for connection=local
    args = ["-e", f"ansible_python_interpreter={sys.executable}"]
    try:
        run_playbook(symlink_path, i_path, None, connection="local",
                     try_unshare=False, provide_output=False,
                     log_stderr=True, ansible_args=args)
        with open(json_data_path) as fd:
            return json.load(fd)["ansible_bender"]
    finally:
        # always clean up the symlink and the scratch dir, even on failure
        os.unlink(symlink_path)
        shutil.rmtree(tmp)