def _store_publisher_info(uri_pub_map, no_uri_pubs, image_dir):
    """Stores a given pair of (uri_pub_map, no_uri_pubs) objects to a
    configuration cache file beneath image_dir.

    Failures to write the cache are reported via error() but never
    raised: the cache is best-effort."""
    cache_path = os.path.join(
        image_dir, pkg.client.global_settings.sysrepo_pub_cache_path)
    cache_dir = os.path.dirname(cache_path)
    try:
        if not os.path.exists(cache_dir):
            # Cache contents are private to the image owner.
            os.makedirs(cache_dir, 0o700)
        try:
            # if the cache exists, it must be a file
            st_cache = os.lstat(cache_path)
            if not stat.S_ISREG(st_cache.st_mode):
                raise IOError("not a regular file")
        except IOError as e:
            # IOError has been merged into OSError in Python 3.4,
            # so we need a special case here.
            # NOTE: because IOError is an alias of OSError on
            # Python 3, this handler also catches lstat()
            # failures (e.g. a missing cache file); those are
            # deliberately ignored unless the message is the
            # "not a regular file" sentinel raised above, which
            # is propagated to the outer handler below.
            if str(e) == "not a regular file":
                raise
        except OSError:
            # Only reachable on Python 2, where IOError and
            # OSError are distinct classes; lstat() errors mean
            # the cache simply doesn't exist yet.
            pass
        with open(cache_path, "w") as cache_file:
            json.dump((uri_pub_map, no_uri_pubs), cache_file,
                indent=4)
            # Restrict the cache to the image owner.
            os.chmod(cache_path, 0o600)
    except IOError as e:
        # Best-effort: report, but do not propagate, cache-write
        # failures.  locals() supplies cache_path and e to the
        # format fields.
        error(
            _("Unable to store config to {cache_path}: {e}").format(
                **locals()))
def _save(self, fobj, reset_volatiles=False):
    """Serialize this plan description as JSON into the given file
    object, replacing any previous contents.

    'fobj' is a writable file object; it is truncated, rewritten and
    flushed.

    'reset_volatiles' is forwarded to PlanDescription.getstate()."""

    plan_state = PlanDescription.getstate(self,
        reset_volatiles=reset_volatiles)
    try:
        fobj.truncate()
        json.dump(plan_state, fobj)
        fobj.flush()
    except OSError as err:
        # Access to protected member; pylint: disable=W0212
        raise apx._convert_error(err)

    # Drop the (potentially large) state snapshot promptly.
    del plan_state
def test_unprived_operation(self):
    """Test that pkg freeze and unfreeze display the frozen packages
    without needing privs, and that they don't stack trace when run
    without privs."""

    self.api_obj = self.image_create(self.rurl)
    # Displaying freezes needs no privileges...
    self.pkg("freeze", su_wrap=True)
    # ...but adding or removing one does.
    # (Fixed: the FMRI argument had been mangled to
    # "[email protected]" by e-mail obfuscation; the assertions
    # below expect the tokens "foo" and "1.0".)
    self.pkg("freeze foo@1.0", su_wrap=True, exit=1)
    self.pkg("unfreeze foo", su_wrap=True, exit=1)
    self.pkg("freeze foo@1.0")
    self.pkg("freeze -H", su_wrap=True)
    tmp = self.output.split()
    self.assertEqualDiff("foo", tmp[0])
    self.assertEqualDiff("1.0", tmp[1])
    # No reason was given for the freeze.
    self.assertTrue("None" in self.output)
    self.pkg("unfreeze -H", su_wrap=True)
    tmp = self.output.split()
    self.assertEqualDiff("foo", tmp[0])
    self.assertEqualDiff("1.0", tmp[1])
    self.assertTrue("None" in self.output)
    # Test that if the freeze file can't be read, we handle the
    # exception appropriately.
    pth = os.path.join(self.img_path(), "var", "pkg", "state",
        "frozen_dict")
    mod = stat.S_IMODE(os.stat(pth)[stat.ST_MODE])
    new_mod = mod & ~stat.S_IROTH
    os.chmod(pth, new_mod)
    self.pkg("freeze", exit=1, su_wrap=True)
    self.pkg("unfreeze", exit=1, su_wrap=True)
    os.chmod(pth, mod)
    # Make sure that we can read the file again.
    self.pkg("freeze", su_wrap=True)
    # Test that we don't stack trace if the version is unexpected.
    version, d = json.load(open(pth))
    with open(pth, "w") as fh:
        json.dump((-1, d), fh)
    self.pkg("freeze", exit=1)
    self.pkg("unfreeze", exit=1)
def write(fileobj, pubs, cfg):
    """Writes the publisher, repository, and provided package names to
    the provided file-like object 'fileobj' in JSON p5s format.
    (The docstring previously said "p5i"; per the 'cfg' description
    below, this generates a p5s file.)

    'fileobj' is an object that has a 'write' method that accepts data
    to be written as a parameter.

    'pubs' is a list of Publisher objects.

    'cfg' is an ImageConfig which contains the properties of the image
    on which the generated p5s file is based."""

    def transform_uris(urls, prefix):
        # Map each repository URI to the URL a client of the
        # system-repository should use to reach it.
        res = []

        for u in urls:
            m = copy.copy(u)
            if m.scheme == "http":
                res.append(m.uri)
            elif m.scheme == "https":
                # The system depot handles connecting to the
                # proxied https repositories, so the client
                # should communicate over http to prevent it
                # from doing tunneling.
                m.change_scheme("http")
                res.append(m.uri)
            elif m.scheme == "file":
                # The system depot provides direct access to
                # file repositories.  The token <sysrepo> will
                # be replaced in the client with the url it uses
                # to communicate with the system repository.
                res.append("http://{0}/{1}/{2}".format(
                    publisher.SYSREPO_PROXY, prefix,
                    digest.DEFAULT_HASH_FUNC(force_bytes(
                        m.uri.rstrip("/"))).hexdigest()))
            else:
                assert False, "{0} is an unknown scheme.".format(
                    u.scheme)

        # Remove duplicates, since the system-repository can only
        # provide one path to a given origin.  This can happen if the
        # image has eg. two origins/mirrors configured for a publisher,
        # with one using http and the other using https, but both using
        # the same netloc and path.
        # We want to preserve origin/mirror order, so simply casting
        # into a set is not appropriate.
        seen = set()
        res_unique = []
        for item in res:
            if item not in seen:
                seen.add(item)
                res_unique.append(item)
        return res_unique

    dump_struct = {
        "publishers": [],
        "image_properties": {},
        "version": CURRENT_VERSION,
    }
    dpubs = dump_struct["publishers"]

    prefixes = set()
    for p in pubs:
        d = None
        if p.repository:
            r = p.repository
            # (Removed an unused local "reg_uri" that was assigned
            # but never read.)
            mirrors = transform_uris(
                [u for u in r.mirrors if not u.disabled], p.prefix)
            origins = transform_uris(
                [u for u in r.origins if not u.disabled], p.prefix)
            d = {
                "collection_type": r.collection_type,
                "description": r.description,
                "legal_uris": [u.uri for u in r.legal_uris],
                "mirrors": mirrors,
                "name": r.name,
                "origins": origins,
                "refresh_seconds": r.refresh_seconds,
                "related_uris": [u.uri for u in r.related_uris],
            }

        dpub = {
            "alias": p.alias,
            "name": p.prefix,
            "repository": d,
            "sticky": p.sticky,
        }

        # Only emit signature properties that differ from the
        # default token.
        sp = p.properties.get("signature-policy")
        if sp and sp != DEF_TOKEN:
            dpub["signature-policy"] = sp
        srn = p.properties.get("signature-required-names")
        if srn:
            # Reuse the value already fetched above instead of a
            # second dictionary lookup.
            dpub["signature-required-names"] = srn

        dpubs.append(dpub)
        prefixes.add(p.prefix)

    # Restrict the search order to publishers actually being emitted.
    dump_struct["image_properties"]["publisher-search-order"] = [
        p for p in cfg.get_property("property",
            "publisher-search-order")
        if p in prefixes
    ]

    sig_pol = cfg.get_property("property", "signature-policy")
    if sig_pol != DEF_TOKEN:
        dump_struct["image_properties"]["signature-policy"] = sig_pol

    req_names = cfg.get_property("property", "signature-required-names")
    if req_names:
        dump_struct["image_properties"]["signature-required-names"] = \
            req_names

    json.dump(dump_struct, fileobj, ensure_ascii=False,
        allow_nan=False, indent=2, sort_keys=True)
    fileobj.write("\n")
def write(fileobj, pubs, pkg_names=None):
    """Emit the given publishers, their repositories, and any provided
    package names to 'fileobj' as a JSON p5i document.

    'fileobj' is an object that has a 'write' method that accepts data
    to be written as a parameter.

    'pkg_names' is a dict of lists, tuples, or sets indexed by
    publisher prefix that contain package names, FMRI strings, or FMRI
    objects.  A prefix of "" can be used for packages that are not
    specific to a publisher.

    'pubs' is a list of Publisher objects."""

    if pkg_names is None:
        pkg_names = {}

    def record_names(names, target):
        # Publisher information is intentionally omitted as
        # association with this specific publisher is implied by
        # location in the output.
        for name in names:
            if isinstance(name, fmri.PkgFmri):
                target.append(name.get_fmri(anarchy=True))
            else:
                target.append(str(name))

    dump_struct = {
        "packages": [],
        "publishers": [],
        "version": CURRENT_VERSION,
    }

    pub_entries = dump_struct["publishers"]
    for pub in pubs:
        entry = {
            "alias": pub.alias,
            "name": pub.prefix,
            "packages": [],
            "repositories": []
        }
        pub_entries.append(entry)

        try:
            record_names(pkg_names[pub.prefix], entry["packages"])
        except KeyError:
            # No packages listed for this publisher.
            pass

        repo = pub.repository
        if repo:
            reg_uri = ""
            if repo.registration_uri:
                reg_uri = repo.registration_uri.uri

            entry["repositories"].append({
                "collection_type": repo.collection_type,
                "description": repo.description,
                "legal_uris": [u.uri for u in repo.legal_uris],
                "mirrors": [u.uri for u in repo.mirrors],
                "name": repo.name,
                "origins": [u.uri for u in repo.origins],
                "refresh_seconds": repo.refresh_seconds,
                "registration_uri": reg_uri,
                "related_uris": [u.uri for u in repo.related_uris],
            })

    try:
        # Publisher-agnostic packages are keyed by the empty prefix.
        record_names(pkg_names[""], dump_struct["packages"])
    except KeyError:
        pass

    json.dump(dump_struct, fileobj, ensure_ascii=False,
        allow_nan=False, indent=2, sort_keys=True)
    fileobj.write("\n")