def read(self):
    """
    Re-load this mapping from the meta.json file on disk.
    """
    # Start from an empty mapping so stale keys do not survive the reload.
    self.clear()
    fresh = load_json(self._meta_path)
    self.update(fresh)
def read(self):
    """
    Reset this object and re-populate it from the meta.json file,
    then validate the result.
    """
    # Forget any previously loaded keys first.
    self.clear()
    # NOTE(review): the prior comment mentioned reading "under a lock";
    # any locking presumably lives inside load_json — confirm there.
    data = load_json(self.path)
    self.update(data)
    self.validate()
def __get_json(self, name):
    """
    Locate the named json file, read it, and decode it.

    :param name: name of the json file to read-in
    :type name: str
    :returns: dict
    """
    path = self.__file(name)
    log.debug("Reading in %s", path)
    return load_json(path)
def test_write_and_load_json(tmpdir):
    """
    Round-trip check: write_json output must be readable by load_json.
    """
    expected = {
        'test': ['data'],
    }
    target = os.path.join(tmpdir, 'data.json')
    cmdlib.write_json(target, expected)
    # The file must exist on disk...
    assert os.path.isfile(target)
    # ...and decode back to exactly the structure we wrote.
    assert cmdlib.load_json(target) == expected
def get_artifact_meta(self, artifact, unmerged=False):
    """
    Return a pared-down dict describing a single artifact.

    :param artifact: artifact name, e.g. "aws"
    :type artifact: str
    :param unmerged: when True (and delayed merging is enabled), read
        the per-artifact sidecar file instead of the merged metadata
    :type unmerged: bool
    :returns: dict with the artifact's "images" entry and top-level key
    """
    source = self.dict()
    sidecar = os.path.join(os.path.dirname(self.path), f"meta.{artifact}.json")
    # Prefer the unmerged per-artifact sidecar only when the caller asks
    # for it, delayed merging is active, and the sidecar actually exists.
    if (unmerged is True
            and self.get(COSA_DELAYED_MERGE) is True
            and os.path.exists(sidecar)):
        source = load_json(sidecar)
    images = source.get("images", {})
    return {
        "images": {
            artifact: images.get(artifact, {})
        },
        artifact: source.get(artifact),
    }
def __get_json(self, name):
    """
    Locate the named json file, read it, and decode it.

    A missing file is tolerated unless the name has a registered
    required-file exception, in which case that exception is raised.

    :param name: name of the json file to read-in
    :type name: str
    :returns: dict
    """
    file_path = self.__file(name)
    log.debug("Reading in %s", file_path)
    try:
        return load_json(file_path)
    except FileNotFoundError:
        required_exc = self._exceptions.get(name)
        if required_exc is None:
            # Optional file: absence simply means "no data".
            return {}
        raise required_exc(f"{file_path} is required")
def __init__(self, workdir=None):
    """
    Load (or initialize) the builds list for a work directory.

    Reads builds.json if present; otherwise creates a fresh 1.0.0-schema
    structure and flushes it to disk. The schema version is then checked:
    only major versions >= 1 and < 2 are supported.

    :param workdir: path to the coreos-assembler work directory
    :raises Exception: if no builds/ dir exists, or the schema is too new
    :raises SystemExit: if the schema major version is below 1
    """
    self._workdir = workdir
    self._fn = self._path(BUILDFILES['list'])
    if not os.path.isdir(self._path("builds")):
        raise Exception("No builds/ dir found!")
    elif os.path.isfile(self._fn):
        self._data = load_json(self._fn)
    else:
        # must be a new workdir; use new schema
        self._data = {'schema-version': "1.0.0", 'builds': []}
        self.flush()
    self._version = semver.parse_version_info(
        self._data.get('schema-version', "0.0.1"))
    # we understand < 2.0.0 only.  Use the public `major` property of
    # semver.VersionInfo rather than the private `_major` attribute.
    if self._version.major >= 2:
        raise Exception("Builds schema too new; please update cosa")
    if self._version.major < 1:
        err = f"Unsupported build metadata version {self._version}"
        raise SystemExit(err)
def test_merge_meta(tmpdir):
    """
    Verifies merging meta.json works as expected.

    Exercises: version-stamp bumping on write, refusal to merge metadata
    from a different build (COSAMergeError), and delayed per-artifact
    merging (meta.<artifact>.json sidecar files).
    """
    # x/y hold deep copies of the first and second loaded metas; used
    # after the loop for the cross-build merge test.
    x = None
    y = None
    # Fake "aws" image entry used to test that stale data is not merged.
    aws = {
        "path": "/dev/null",
        "size": 99999999,
        "sha256": "ff279bc0207964d96571adfd720b1af1b65e587e589eee528d0315b7fb298773"
    }

    def get_aws(x, key="path"):
        # Convenience accessor for the nested images.aws.<key> value.
        return x.get("images", {}).get("aws", {}).get(key)

    # Assumes TEST_META_PATH holds at least two meta.json fixtures so
    # both x and y get populated — TODO confirm fixture count.
    for meta_f in os.listdir(TEST_META_PATH):
        test_meta = os.path.join(TEST_META_PATH, meta_f)
        with open(test_meta, 'r') as valid_data:
            td = json.load(valid_data)
            m = meta.GenericBuildMeta(
                _create_test_files(tmpdir, meta_data=td), '1.2.3')
            w = meta.GenericBuildMeta(
                _create_test_files(tmpdir, meta_data=td), '1.2.3')
            # create working copies
            if x is None:
                x = copy.deepcopy(m)
            else:
                y = copy.deepcopy(m)
            # add the stamp
            m.write()
            old_stamp = m.get(meta.COSA_VER_STAMP)
            assert old_stamp is not None
            # check merging old into new: the stale in-memory stamp (10)
            # must lose to the newer on-disk data on write.
            m["images"]["aws"] = aws
            m[meta.COSA_VER_STAMP] = 10
            m.write()
            new_stamp = m.get(meta.COSA_VER_STAMP)
            assert new_stamp > old_stamp
            assert get_aws(m) != aws["path"]

    # Now go full yolo and attempt to merge RHCOS into FCOS
    # Srsly? Who's going to do this...
    y._meta_path = x.path
    with pytest.raises(meta.COSAMergeError):
        x.write()

    # --- Artifact merging tests ---
    # clear the meta.json that's been corrupted
    os.unlink(x.path)

    # test that write went to meta.json
    maws = x.write()
    assert x.path == maws

    # test that write went to meta.aws.json
    x.set("coreos-assembler.delayed-meta-merge", True)
    maws = x.write(artifact_name="aws")
    assert maws.endswith("aws.json")

    # make sure that meta.json != meta.aws.json
    x.read()
    d = load_json(maws)
    assert get_aws(m) != get_aws(d)

    # test that the write went to meta.<TS>.json
    tnw = x.write()
    assert maws != tnw