def test_find_py_intrprtr_in_fedora_image(image_name, found):
    build = Build()
    build.base_image = image_name
    build.target_image = "starena"
    bb = BuildahBuilder(build)
    try:
        assert bb.find_python_interpreter()
    except RuntimeError:
        if found:
            # interpreter should have been found
            raise
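# The test above expects "image_name" and "found" to be supplied via
# parametrization defined elsewhere in the suite. A minimal sketch of what
# that could look like (the image references below are assumptions, not the
# suite's actual values):
#
#     @pytest.mark.parametrize("image_name,found", [
#         ("registry.fedoraproject.org/fedora:latest", True),   # ships python3
#         ("docker.io/library/busybox:latest", False),          # no interpreter
#     ])
#     def test_find_py_intrprtr_in_fedora_image(image_name, found):
#         ...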
def test_file_caching_mechanism(tmpdir, target_image, application, build):
    """ make sure that we don't load from cache when a file was changed """
    t = str(tmpdir)
    pb_name = "file_caching.yaml"
    test_file_name = "a_bag_of_fun"
    file_caching_pb = os.path.join(data_dir, pb_name)
    p = os.path.join(t, pb_name)
    test_file = os.path.join(data_dir, test_file_name)
    f = os.path.join(t, test_file_name)
    shutil.copy(file_caching_pb, p)
    shutil.copy(test_file, f)

    with open(p) as fd:
        d = yaml.safe_load(fd)
    d[0]["tasks"][0]["copy"]["src"] = f
    with open(p, "w") as fd:
        yaml.safe_dump(d, fd)

    build.playbook_path = p
    second_build = Build.from_json(build.to_dict())
    cached_build = Build.from_json(build.to_dict())

    application.build(build)
    build = application.db.get_build(build.build_id)
    assert len(build.layers) == 2
    assert build.layers[0].cached
    assert not build.layers[1].cached  # ideally this would be cached, but isn't now

    application.build(cached_build)
    cached_build = application.db.get_build(cached_build.build_id)
    assert len(cached_build.layers) == 2
    # since ansible doesn't track files and whether they changed,
    # let's just make sure it works the way we expect it to
    assert cached_build.layers[0].cached
    assert not cached_build.layers[1].cached

    # and now we test that if we change the file, it's not loaded from cache
    fun_content = "Much more fun, fun, fun!"
    with open(f, "w") as fd:
        fd.write(fun_content)

    application.build(second_build)
    second_build = application.db.get_build(second_build.build_id)
    assert not second_build.layers[1].cached
    builder = application.get_builder(second_build)
    out = builder.run(second_build.target_image, ["cat", "/fun"])
    assert out == fun_content
def test_no_cache_tag(target_image, application, build):
    """ utilize a playbook which halts caching """
    dont_cache_b = Build.from_json(build.to_dict())

    build.playbook_path = dont_cache_playbook_path_pre
    application.build(build)
    build = application.db.get_build(build.build_id)
    assert len(build.layers) == 4
    assert build.layers[0].cached
    assert not build.layers[1].cached
    assert not build.layers[2].cached
    assert not build.layers[3].cached

    dont_cache_b.target_image += "2"
    dont_cache_b.playbook_path = dont_cache_playbook_path
    application.build(dont_cache_b)
    dont_cache_b = application.db.get_build(dont_cache_b.build_id)
    assert len(dont_cache_b.layers) == 4
    assert dont_cache_b.layers[0].cached
    assert dont_cache_b.layers[1].cached
    assert not dont_cache_b.layers[2].cached
    assert not dont_cache_b.layers[3].cached

    builder = application.get_builder(dont_cache_b)
    builder.run(dont_cache_b.target_image, ["ls", "-1", "/asd"])
def build(target_image):
    build = Build()
    build.playbook_path = basic_playbook_path
    build.base_image = base_image
    build.target_image = target_image
    build.metadata = ImageMetadata()
    build.state = BuildState.NEW
    build.builder_name = "buildah"  # test with all builders
    return build
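# In a pytest suite this helper is usually registered as a fixture so that the
# tests above can request it by name. A minimal sketch, assuming "target_image"
# is itself a fixture yielding a unique image name (the decorators and the
# naming scheme are assumptions, not shown in this excerpt):
#
#     @pytest.fixture()
#     def target_image():
#         return "ab-test-" + str(uuid.uuid4())  # hypothetical naming scheme
#
#     @pytest.fixture()
#     def build(target_image):
#         ...  # body as defined above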
def load_builds(self):
    """
    provide a list of all available builds

    :return: a list of Build instances
    """
    with self.acquire():
        data = self._load()
        return [Build.from_json(b) for b in data["builds"].values()]
def test_caching(target_image, application, build):
    b2 = Build.from_json(build.to_dict())

    application.build(build)
    b2.build_id = None
    b2.layers = []
    b2.target_image += "2"
    application.build(b2)

    build = application.db.get_build(build.build_id)
    b2 = application.db.get_build(b2.build_id)
    assert [x.layer_id for x in b2.layers[:3]] == [y.layer_id for y in build.layers[:3]]
    assert not b2.layers[4].cached
    assert not build.layers[4].cached
    assert len(build.layers) == 5
def _build(self):
    metadata = ImageMetadata()
    if self.args.workdir:
        metadata.working_dir = self.args.workdir
    if self.args.labels:
        for label in self.args.labels:
            err_msg = "Label variable {} doesn't seem to be " \
                      "specified in format 'KEY=VALUE'.".format(label)
            k, v = split_once_or_fail_with(label, "=", err_msg)
            metadata.labels[k] = v
    if self.args.env_vars:
        for e_v in self.args.env_vars:
            err_msg = "Environment variable {} doesn't seem to be " \
                      "specified in format 'KEY=VALUE'.".format(e_v)
            k, v = split_once_or_fail_with(e_v, "=", err_msg)
            metadata.env_vars[k] = v
    if self.args.cmd:
        metadata.cmd = self.args.cmd
    if self.args.user:
        metadata.user = self.args.user
    if self.args.ports:
        metadata.ports = self.args.ports
    if self.args.runtime_volumes:
        metadata.volumes = self.args.runtime_volumes

    build = Build()  # we should have a helper for this
    build.playbook_path = self.args.playbook_path
    build.build_volumes = self.args.build_volumes
    build.metadata = metadata
    build.state = BuildState.NEW
    build.base_image = self.args.base_image
    build.target_image = self.args.target_image
    build.builder_name = self.args.builder
    build.cache_tasks = not self.args.no_cache
    self.app.build(build)
def test_caching_mechanism(target_image, application, build):
    """
    check that previously executed tasks are being loaded from cache
    and new ones are computed from scratch
    """
    small_build = Build.from_json(build.to_dict())
    small_build.target_image += "2"
    small_build.playbook_path = small_basic_playbook_path

    application.build(small_build)
    small_build = application.db.get_build(small_build.build_id)
    assert len(small_build.layers) == 2
    assert small_build.layers[0].cached
    assert not small_build.layers[1].cached

    application.build(build)
    build = application.db.get_build(build.build_id)
    assert len(build.layers) == 5
    assert build.layers[0].cached
    assert build.layers[1].cached
    assert not build.layers[2].cached
    assert not build.layers[3].cached
    assert not build.layers[4].cached
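# Reading the layer asserts above: layers[0] appears to correspond to the base
# image (it is reported as cached even on a first build), and each subsequent
# layer maps to one playbook task, so the small playbook produces 2 layers and
# the basic one 5. A rough correspondence, sketched under that assumption:
#
#     layers[0]  -> base image        (always cached)
#     layers[1]  -> first task        (cached here because small_build already ran it)
#     layers[2:] -> remaining tasks   (computed from scratch here)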
def _load_build(data, build_id):
    try:
        return Build.from_json(data["builds"][build_id])
    except KeyError:
        raise RuntimeError("There is no such build with ID %s" % build_id)