def test_2_depot_p5i(self):
    """Verify the output of the depot p5i/0 operation.

    (The docstring previously claimed this tested the /publisher
    operation, but every request below targets p5i/0 URLs.)
    """

    # Now update the repository configuration while the depot is
    # stopped so changes won't be overwritten on exit.
    self.__update_repo_config()

    # Start the depot.
    self.dc.start()

    # Then, publish some packages we can abuse for testing.
    durl = self.dc.get_depot_url()
    plist = self.pkgsend_bulk(durl, (self.info10, self.quux10,
        self.system10, self.zfsextras10, self.zfsutils10))

    # Now, for each published package, attempt to get a p5i file
    # and then verify that the parsed response has the expected
    # package information under the expected publisher.
    for p in plist:
        purl = urlparse.urljoin(durl, "p5i/0/%s" % p)
        pub, pkglist = p5i.parse(location=purl)[0]

        # p5i files contain non-qualified FMRIs as the FMRIs
        # are already grouped by publisher.
        nq_p = fmri.PkgFmri(p).get_fmri(anarchy=True,
            include_scheme=False)
        self.assertEqual(pkglist, [nq_p])

    # Try again, but only using package stems.
    for p in plist:
        stem = fmri.PkgFmri(p).pkg_name
        purl = urlparse.urljoin(durl, "p5i/0/%s" % stem)
        pub, pkglist = p5i.parse(location=purl)[0]
        self.assertEqual(pkglist, [stem])

    # Try again, but using wildcards (which will return a list of
    # matching package stems).
    purl = urlparse.urljoin(durl, "p5i/0/zfs*")
    pub, pkglist = p5i.parse(location=purl)[0]
    self.assertEqual(pkglist, ["zfs-extras", "zfs/utils"])

    # Finally, verify that a non-existent package will error out
    # with a httplib.NOT_FOUND.
    try:
        urllib2.urlopen(urlparse.urljoin(durl,
            "p5i/0/nosuchpackage"))
    # "except E, e" was removed in Python 3; "as e" works on 2.6+
    # and matches the sibling version of this test.
    except urllib2.HTTPError as e:
        if e.code != httplib.NOT_FOUND:
            raise
def test_2_depot_p5i(self): """Verify the output of the depot /publisher operation.""" # Now update the repository configuration while the depot is # stopped so changes won't be overwritten on exit. self.__update_repo_config() # Start the depot. self.dc.start() # Then, publish some packages we can abuse for testing. durl = self.dc.get_depot_url() plist = self.pkgsend_bulk(durl, (self.info10, self.quux10, self.system10, self.zfsextras10, self.zfsutils10)) # Now, for each published package, attempt to get a p5i file # and then verify that the parsed response has the expected # package information under the expected publisher. for p in plist: purl = urlparse.urljoin(durl, "p5i/0/{0}".format(p)) pub, pkglist = p5i.parse(location=purl)[0] # p5i files contain non-qualified FMRIs as the FMRIs # are already grouped by publisher. nq_p = fmri.PkgFmri(p).get_fmri(anarchy=True, include_scheme=False) self.assertEqual(pkglist, [nq_p]) # Try again, but only using package stems. for p in plist: stem = fmri.PkgFmri(p).pkg_name purl = urlparse.urljoin(durl, "p5i/0/{0}".format(stem)) pub, pkglist = p5i.parse(location=purl)[0] self.assertEqual(pkglist, [stem]) # Try again, but using wildcards (which will return a list of # matching package stems). purl = urlparse.urljoin(durl, "p5i/0/zfs*") pub, pkglist = p5i.parse(location=purl)[0] self.assertEqual(pkglist, ["zfs-extras", "zfs/utils"]) # Finally, verify that a non-existent package will error out # with a httplib.NOT_FOUND. try: urllib2.urlopen(urlparse.urljoin(durl, "p5i/0/nosuchpackage")) except urllib2.HTTPError as e: if e.code != httplib.NOT_FOUND: raise
def test_1_depot_publisher(self):
    """Verify the output of the depot /publisher operation."""

    # Now update the repository configuration while the depot is
    # stopped so changes won't be overwritten on exit.
    self.__update_repo_config()

    # Start the depot.
    self.dc.start()

    durl = self.dc.get_depot_url()
    purl = urlparse.urljoin(durl, "publisher/0")
    entries = p5i.parse(location=purl)

    # Use unittest assertions rather than bare "assert" so a
    # failure reports the actual prefix values (and is not
    # stripped when run under python -O).
    self.assertEqual(entries[0][0].prefix, "test")
    self.assertEqual(entries[1][0].prefix, "org.opensolaris.pending")

    # Now verify that the parsed response has the expected data.
    pub, pkglist = entries[-1]
    cfgdata = self.repo_cfg
    for prop in cfgdata["publisher"]:
        self.assertEqual(getattr(pub, prop),
            cfgdata["publisher"][prop])

    repo = pub.repository
    for prop, expected in self.pub_repo_cfg.iteritems():
        returned = getattr(repo, prop)
        if prop.endswith("uris") or prop == "origins":
            # URI-valued properties come back as objects with
            # trailing slashes; normalize before comparing.
            uris = []
            for u in returned:
                uri = u.uri
                if uri.endswith("/"):
                    uri = uri[:-1]
                uris.append(uri)
            returned = uris
        self.assertEqual(returned, expected)
def _parse_src(self, soft_node):
    """Read the source node of the manifest, parse the p5i file
    named by its first publisher's first origin, and record any
    additional publishers for the ips image."""
    source = soft_node.get_children(Source.SOURCE_LABEL, Source,
        not_found_is_err=True)[0]
    publishers = source.get_children(Publisher.PUBLISHER_LABEL,
        Publisher, not_found_is_err=True)

    # The first publisher's first origin names the p5i file.
    primary = publishers.pop(0)
    origins = primary.get_children(Origin.ORIGIN_LABEL, Origin,
        not_found_is_err=True)
    p5i_file = origins[0].origin
    try:
        self._p5i_lst = p5i.parse(location=p5i_file)
    except api_errors.InvalidP5IFile:
        raise Exception(p5i_file + " does not have the correct format")

    # If there are any further publishers specified, treat those
    # as publishers for the ips image; the first of them becomes
    # the preferred publisher.
    if publishers:
        self._set_publisher_info(publishers.pop(0), preferred=True)
    for extra in publishers:
        self._set_publisher_info(extra, preferred=False)
def _parse_input(self):
    """Parse the configured p5i file and queue one install
    transfer per (publisher, package-list) entry found in it.

    Raises Exception if no p5i file was specified.
    """
    self.logger.info("Reading the p5i file")
    if self.src is None:
        raise Exception("A p5i file must be specified")
    p5i_file = self.src
    self.logger.debug("p5i file specified is " + p5i_file)
    self._p5i_lst = p5i.parse(location=p5i_file)

    # Unpack each (publisher, package list) entry directly; the
    # previous code shadowed the p5i_file path variable here.
    for pub, pkglst in self._p5i_lst:
        # BUG FIX: a single dict was previously created outside
        # the loop and mutated each iteration, so every appended
        # entry aliased the same object and the "not in" dedupe
        # check always matched after the first append.  A fresh
        # dict per entry preserves each transfer's values.
        trans_attr = dict()
        trans_attr[ACTION] = INSTALL
        trans_attr[CONTENTS] = pkglst
        trans_attr[APP_CALLBACK] = None
        trans_attr[PURGE_HISTORY] = None

        # Append the information found to the list of
        # transfers that will be performed, skipping duplicates.
        if trans_attr not in self._transfer_list:
            self._transfer_list.append(trans_attr)
def test_parse_write_partial(self):
    """Verify that a p5i file with various parts of a publisher's
    repository configuration omitted will still parse and write
    as expected."""

    # First, test the no repository case.
    # NOTE: Spaces, or lack thereof, at the end of a line, are
    # important.
    expected = """{ "packages": [], "publishers": [ { "alias": "cat", "name": "bobcat", "packages": [], "repositories": [] } ], "version": 1 } """

    pub = self.__get_bobcat_pub(omit_repo=True)

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertEqualJSON(expected, output)

    # Now parse the result and verify no repositories are defined.
    # assertTrue replaces the assert_ alias deprecated since 2.7.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertTrue(not pub.repository)

    # Next, test the partial repository configuration case.  No
    # origin is provided, but everything else is.
    # NOTE: Spaces, or lack thereof, at the end of a line, are
    # important.
    expected = """{ "packages": [], "publishers": [ { "alias": "cat", "name": "bobcat", "packages": [], "repositories": [ { "collection_type": "core", "description": "xkcd.net/325", "legal_uris": [ "http://xkcd.com/license.html" ], "mirrors": [], "name": "source", "origins": [], "refresh_seconds": 43200, "registration_uri": "", "related_uris": [] } ] } ], "version": 1 } """

    pub = self.__get_bobcat_pub()

    # Nuke the origin data.
    pub.repository.reset_origins()

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertEqualJSON(expected, output)

    # Now parse the result and verify that there is a repository,
    # but without origins information.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertEqualDiff([], pub.repository.origins)
def test_parse_write(self):
    """Verify that the p5i parsing and writing works as expected."""

    # Verify that p5i export and parse works as expected.
    pub = self.__get_bobcat_pub()

    # First, ensure that PkgFmri and strings are supported properly.
    # Build a simple list of packages.
    fmri_foo = fmri.PkgFmri("pkg:/[email protected],5.11-0")
    pnames = {
        "bobcat": [fmri_foo],
        "": ["pkg:/[email protected],5.11-0", "baz"],
    }

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub], pkg_names=pnames)

    # Verify that the p5i data ends with a terminating newline.
    fobj.seek(-1, 2)
    self.assertEqual(fobj.read(), "\n")

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertEqualJSON(self.p5i_bobcat, output)

    def validate_results(results):
        # First result should be 'bobcat' publisher and its
        # pkg_names.
        pub, pkg_names = results[0]
        self.assertEqual(pub.prefix, "bobcat")
        self.assertEqual(pub.alias, "cat")
        repo = pub.repository
        self.assertEqual(repo.name, "source")
        self.assertEqual(repo.description, "xkcd.net/325")
        self.assertEqual(repo.legal_uris[0],
            "http://xkcd.com/license.html")
        self.assertEqual(repo.refresh_seconds, 43200)
        self.assertEqual(pkg_names, [str(fmri_foo)])

        # Last result should be no publisher and a list of
        # pkg_names.
        pub, pkg_names = results[1]
        self.assertEqual(pub, None)
        self.assertEqual(pkg_names, ["pkg:/[email protected],5.11-0",
            "baz"])

    # Verify that parse returns the expected object and information
    # when provided a fileobj.
    fobj.seek(0)
    validate_results(p5i.parse(fileobj=fobj))

    # Verify that parse returns the expected object and information
    # when provided a file path.
    fobj.seek(0)
    (fd1, path1) = tempfile.mkstemp(dir=self.test_root)
    os.write(fd1, fobj.read())
    os.close(fd1)
    validate_results(p5i.parse(location=path1))

    # Verify that parse returns the expected object and information
    # when provided a file URI.
    location = os.path.abspath(path1)
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(location), "", "", ""))
    validate_results(p5i.parse(location=location))
    fobj.close()
    fobj = None

    # Verify that appropriate exceptions are raised for p5i
    # information that can't be retrieved (doesn't exist).
    nefpath = os.path.join(self.test_root, "non-existent")
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location="file://{0}".format(nefpath))
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location=nefpath)

    # Verify that appropriate exceptions are raised for invalid
    # p5i information.
    lcpath = os.path.join(self.test_root, "libc.so.1")
    location = os.path.abspath(lcpath)
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(location), "", "", ""))

    # First, test as a file:// URI.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)

    # Last, test as a pathname.  BUG FIX: this previously passed
    # the file:// URI again, so the pathname case was never
    # actually exercised.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=lcpath)
def test_parse_write_partial(self):
    """Verify that a p5i file with various parts of a publisher's
    repository configuration omitted will still parse and write
    as expected."""

    # First, test the no repository case.
    expected = """{ "packages": [], "publishers": [ { "alias": "cat", "name": "bobcat", "packages": [], "repositories": [] } ], "version": 1 } """

    pub = self.__get_bobcat_pub(omit_repo=True)

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertPrettyEqual(output, expected)

    # Now parse the result and verify no repositories are defined.
    # assertTrue replaces the assert_ alias deprecated since 2.7.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertTrue(not pub.repository)

    # Next, test the partial repository configuration case.  No
    # origin is provided, but everything else is.
    expected = """{ "packages": [], "publishers": [ { "alias": "cat", "name": "bobcat", "packages": [], "repositories": [ { "collection_type": "core", "description": "xkcd.net/325", "legal_uris": [ "http://xkcd.com/license.html" ], "mirrors": [], "name": "source", "origins": [], "refresh_seconds": 43200, "registration_uri": "", "related_uris": [] } ] } ], "version": 1 } """

    pub = self.__get_bobcat_pub()

    # Nuke the origin data.
    pub.repository.reset_origins()

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertPrettyEqual(output, expected)

    # Now parse the result and verify that there is a repository,
    # but without origins information.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertPrettyEqual(pub.repository.origins, [])
def test_parse_write(self):
    """Verify that the p5i parsing and writing works as expected."""

    # Verify that p5i export and parse works as expected.
    pub = self.__get_bobcat_pub()

    # First, ensure that PkgFmri and strings are supported properly.
    # Build a simple list of packages.
    fmri_foo = fmri.PkgFmri("pkg:/[email protected],5.11-0", None)
    pnames = {
        "bobcat": [fmri_foo],
        "": ["pkg:/[email protected],5.11-0", "baz"],
    }

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub], pkg_names=pnames)

    # Verify that the p5i data ends with a terminating newline.
    fobj.seek(-1, 2)
    self.assertEqual(fobj.read(), "\n")

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertPrettyEqual(output, self.p5i_bobcat)

    def validate_results(results):
        # First result should be 'bobcat' publisher and its
        # pkg_names.
        pub, pkg_names = results[0]
        self.assertEqual(pub.prefix, "bobcat")
        self.assertEqual(pub.alias, "cat")
        repo = pub.repository
        self.assertEqual(repo.name, "source")
        self.assertEqual(repo.description, "xkcd.net/325")
        self.assertEqual(repo.legal_uris[0],
            "http://xkcd.com/license.html")
        self.assertEqual(repo.refresh_seconds, 43200)
        self.assertEqual(pkg_names, [str(fmri_foo)])

        # Last result should be no publisher and a list of
        # pkg_names.
        pub, pkg_names = results[1]
        self.assertEqual(pub, None)
        self.assertEqual(pkg_names, ["pkg:/[email protected],5.11-0",
            "baz"])

    # Verify that parse returns the expected object and information
    # when provided a fileobj.
    fobj.seek(0)
    validate_results(p5i.parse(fileobj=fobj))

    # Verify that parse returns the expected object and information
    # when provided a file path.
    fobj.seek(0)
    (fd1, path1) = tempfile.mkstemp(dir=self.test_root)
    os.write(fd1, fobj.read())
    os.close(fd1)
    validate_results(p5i.parse(location=path1))

    # Verify that parse returns the expected object and information
    # when provided a file URI.
    location = os.path.abspath(path1)
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(location), "", "", ""))
    validate_results(p5i.parse(location=location))
    fobj.close()
    fobj = None

    # Verify that appropriate exceptions are raised for p5i
    # information that can't be retrieved (doesn't exist).
    nefpath = os.path.join(self.test_root, "non-existent")
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location="file://%s" % nefpath)
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location=nefpath)

    # Verify that appropriate exceptions are raised for invalid
    # p5i information.
    lcpath = os.path.join(self.test_root, "libc.so.1")
    location = os.path.abspath(lcpath)
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(location), "", "", ""))

    # First, test as a file:// URI.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)

    # Last, test as a pathname.  BUG FIX: this previously passed
    # the file:// URI again, so the pathname case was never
    # actually exercised.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=lcpath)