def _write_publisher_response(pubs, htdocs_path, repo_prefix):
    """Writes a static publisher/0 response for the depot.

    'pubs' is a list of publisher prefix strings.

    'htdocs_path' is the root directory under which the static
    responses are written.

    'repo_prefix' is the path component used to namespace this
    repository's responses beneath htdocs_path.

    Raises DepotException if any response could not be written.
    """
    try:
        # convert our list of strings to a list of Publishers
        pub_objs = [pkg.client.publisher.Publisher(pub) for pub in pubs]

        # write individual responses for the publishers
        for pub in pub_objs:
            pub_path = os.path.join(htdocs_path,
                os.path.sep.join([repo_prefix, pub.prefix] +
                DEPOT_PUB_DIRNAME))
            misc.makedirs(pub_path)
            # file() does not exist in Python 3; open() is the
            # portable spelling.
            with open(os.path.join(pub_path, "index.html"), "w") as \
                pub_file:
                p5i.write(pub_file, [pub])

        # write a response that contains all publishers
        pub_path = os.path.join(htdocs_path,
            os.path.sep.join([repo_prefix] + DEPOT_PUB_DIRNAME))
        # Use misc.makedirs here as well, matching the per-publisher
        # paths above; os.makedirs raises OSError when the directory
        # already exists, which would needlessly fail a rewrite of
        # the responses.
        misc.makedirs(pub_path)
        with open(os.path.join(pub_path, "index.html"), "w") as \
            pub_file:
            p5i.write(pub_file, pub_objs)
    except (OSError, apx.ApiException) as err:
        raise DepotException(
            _("Unable to write publisher response: {0}").format(err))
def _write_publisher_response(pubs, htdocs_path, repo_prefix):
    """Writes a static publisher/0 response for the depot.

    'pubs' is a list of publisher prefix strings.

    'htdocs_path' is the root directory under which the static
    responses are written.

    'repo_prefix' is the path component used to namespace this
    repository's responses beneath htdocs_path.

    Raises DepotException if any response could not be written.
    """
    try:
        # convert our list of strings to a list of Publishers
        pub_objs = [pkg.client.publisher.Publisher(pub) for pub in pubs]

        # write individual responses for the publishers
        for pub in pub_objs:
            pub_path = os.path.join(htdocs_path, os.path.sep.join(
                [repo_prefix, pub.prefix] + DEPOT_PUB_DIRNAME))
            misc.makedirs(pub_path)
            # file() does not exist in Python 3; open() works in
            # both Python 2 and 3.
            with open(os.path.join(pub_path, "index.html"), "w") as \
                pub_file:
                p5i.write(pub_file, [pub])

        # write a response that contains all publishers
        pub_path = os.path.join(htdocs_path,
            os.path.sep.join([repo_prefix] + DEPOT_PUB_DIRNAME))
        # NOTE(review): os.makedirs raises OSError if pub_path already
        # exists, unlike the misc.makedirs used above -- presumably
        # intentional for a first-time write; confirm against callers.
        os.makedirs(pub_path)
        with open(os.path.join(pub_path, "index.html"), "w") as \
            pub_file:
            p5i.write(pub_file, pub_objs)
    # "except E, err" is Python-2-only syntax (a SyntaxError on
    # Python 3); "as err" is valid on Python 2.6+ and 3.
    except (OSError, apx.ApiException) as err:
        raise DepotException(
            _("Unable to write publisher response: %s") % err)
def test_parse_write_partial(self):
    """Verify that a p5i file with various parts of a publisher's
    repository configuration omitted will still parse and write
    as expected."""

    # First, test the no repository case.
    # NOTE: Spaces, or lack thereof, at the end of a line, are
    # important.
    expected = """{
  "packages": [],
  "publishers": [
    {
      "alias": "cat",
      "name": "bobcat",
      "packages": [],
      "repositories": []
    }
  ],
  "version": 1
}
"""

    pub = self.__get_bobcat_pub(omit_repo=True)

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertEqualJSON(expected, output)

    # Now parse the result and verify no repositories are defined.
    pub, pkg_names = p5i.parse(data=output)[0]
    # assertTrue replaces the deprecated assert_ alias (removed in
    # Python 3.12).
    self.assertTrue(not pub.repository)

    # Next, test the partial repository configuration case.  No
    # origin is provided, but everything else is.
    # NOTE: Spaces, or lack thereof, at the end of a line, are
    # important.
    expected = """{
  "packages": [],
  "publishers": [
    {
      "alias": "cat",
      "name": "bobcat",
      "packages": [],
      "repositories": [
        {
          "collection_type": "core",
          "description": "xkcd.net/325",
          "legal_uris": [
            "http://xkcd.com/license.html"
          ],
          "mirrors": [],
          "name": "source",
          "origins": [],
          "refresh_seconds": 43200,
          "registration_uri": "",
          "related_uris": []
        }
      ]
    }
  ],
  "version": 1
}
"""

    pub = self.__get_bobcat_pub()

    # Nuke the origin data.
    pub.repository.reset_origins()

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertEqualJSON(expected, output)

    # Now parse the result and verify that there is a repository,
    # but without origins information.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertEqualDiff([], pub.repository.origins)
def test_parse_write(self):
    """Verify that the p5i parsing and writing works as expected."""

    # Verify that p5i export and parse works as expected.
    pub = self.__get_bobcat_pub()

    # First, Ensure that PkgFmri and strings are supported properly.
    # Build a simple list of packages.
    foo_fmri = fmri.PkgFmri("pkg:/[email protected],5.11-0")
    pkg_map = {
        "bobcat": [foo_fmri],
        "": ["pkg:/[email protected],5.11-0", "baz"],
    }

    # Dump the p5i data.
    buf = cStringIO.StringIO()
    p5i.write(buf, [pub], pkg_names=pkg_map)

    # The serialized p5i data must end with a terminating newline.
    buf.seek(-1, 2)
    self.assertEqual(buf.read(), "\n")

    # The serialized form must match the expected output.
    buf.seek(0)
    output = buf.read()
    self.assertEqualJSON(self.p5i_bobcat, output)

    def check_results(results):
        # Entry 0: the 'bobcat' publisher along with its
        # pkg_names.
        pub, pkg_names = results[0]
        self.assertEqual(pub.prefix, "bobcat")
        self.assertEqual(pub.alias, "cat")
        repo = pub.repository
        self.assertEqual(repo.name, "source")
        self.assertEqual(repo.description, "xkcd.net/325")
        self.assertEqual(repo.legal_uris[0],
            "http://xkcd.com/license.html")
        self.assertEqual(repo.refresh_seconds, 43200)
        self.assertEqual(pkg_names, [str(foo_fmri)])

        # Entry 1: no publisher, just a list of pkg_names.
        pub, pkg_names = results[1]
        self.assertEqual(pub, None)
        self.assertEqual(pkg_names,
            ["pkg:/[email protected],5.11-0", "baz"])

    # parse() must yield the expected objects when handed a file
    # object ...
    buf.seek(0)
    check_results(p5i.parse(fileobj=buf))

    # ... when handed a plain file path ...
    buf.seek(0)
    tmp_fd, tmp_path = tempfile.mkstemp(dir=self.test_root)
    os.write(tmp_fd, buf.read())
    os.close(tmp_fd)
    check_results(p5i.parse(location=tmp_path))

    # ... and when handed a file:// URI.
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(os.path.abspath(tmp_path)), "", "", ""))
    check_results(p5i.parse(location=location))
    buf.close()
    buf = None

    # Verify that appropriate exceptions are raised for p5i
    # information that can't be retrieved (doesn't exist).
    nefpath = os.path.join(self.test_root, "non-existent")
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location="file://{0}".format(nefpath))
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location=nefpath)

    # Verify that appropriate exceptions are raised for invalid
    # p5i information.
    lcpath = os.path.join(self.test_root, "libc.so.1")
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(os.path.abspath(lcpath)), "", "", ""))

    # First, test as a file:// URI.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)

    # Last, test as a pathname.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)
def test_parse_write_partial(self):
    """Verify that a p5i file with various parts of a publisher's
    repository configuration omitted will still parse and write
    as expected."""

    # First, test the no repository case.
    expected = """{
  "packages": [],
  "publishers": [
    {
      "alias": "cat",
      "name": "bobcat",
      "packages": [],
      "repositories": []
    }
  ],
  "version": 1
}
"""

    pub = self.__get_bobcat_pub(omit_repo=True)

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertPrettyEqual(output, expected)

    # Now parse the result and verify no repositories are defined.
    pub, pkg_names = p5i.parse(data=output)[0]
    # assertTrue replaces the deprecated assert_ alias (removed in
    # Python 3.12).
    self.assertTrue(not pub.repository)

    # Next, test the partial repository configuration case.  No
    # origin is provided, but everything else is.
    expected = """{
  "packages": [],
  "publishers": [
    {
      "alias": "cat",
      "name": "bobcat",
      "packages": [],
      "repositories": [
        {
          "collection_type": "core",
          "description": "xkcd.net/325",
          "legal_uris": [
            "http://xkcd.com/license.html"
          ],
          "mirrors": [],
          "name": "source",
          "origins": [],
          "refresh_seconds": 43200,
          "registration_uri": "",
          "related_uris": []
        }
      ]
    }
  ],
  "version": 1
}
"""

    pub = self.__get_bobcat_pub()

    # Nuke the origin data.
    pub.repository.reset_origins()

    # Dump the p5i data.
    fobj = cStringIO.StringIO()
    p5i.write(fobj, [pub])

    # Verify that output matches expected output.
    fobj.seek(0)
    output = fobj.read()
    self.assertPrettyEqual(output, expected)

    # Now parse the result and verify that there is a repository,
    # but without origins information.
    pub, pkg_names = p5i.parse(data=output)[0]
    self.assertPrettyEqual(pub.repository.origins, [])
def test_parse_write(self):
    """Verify that the p5i parsing and writing works as expected."""

    # Verify that p5i export and parse works as expected.
    pub = self.__get_bobcat_pub()

    # First, Ensure that PkgFmri and strings are supported properly.
    # Build a simple list of packages.
    foo_fmri = fmri.PkgFmri("pkg:/[email protected],5.11-0", None)
    pkg_map = {
        "bobcat": [foo_fmri],
        "": ["pkg:/[email protected],5.11-0", "baz"],
    }

    # Dump the p5i data.
    buf = cStringIO.StringIO()
    p5i.write(buf, [pub], pkg_names=pkg_map)

    # The serialized p5i data must end with a terminating newline.
    buf.seek(-1, 2)
    self.assertEqual(buf.read(), "\n")

    # The serialized form must match the expected output.
    buf.seek(0)
    output = buf.read()
    self.assertPrettyEqual(output, self.p5i_bobcat)

    def check_results(results):
        # Entry 0: the 'bobcat' publisher along with its
        # pkg_names.
        pub, pkg_names = results[0]
        self.assertEqual(pub.prefix, "bobcat")
        self.assertEqual(pub.alias, "cat")
        repo = pub.repository
        self.assertEqual(repo.name, "source")
        self.assertEqual(repo.description, "xkcd.net/325")
        self.assertEqual(repo.legal_uris[0],
            "http://xkcd.com/license.html")
        self.assertEqual(repo.refresh_seconds, 43200)
        self.assertEqual(pkg_names, [str(foo_fmri)])

        # Entry 1: no publisher, just a list of pkg_names.
        pub, pkg_names = results[1]
        self.assertEqual(pub, None)
        self.assertEqual(pkg_names,
            ["pkg:/[email protected],5.11-0", "baz"])

    # parse() must yield the expected objects when handed a file
    # object ...
    buf.seek(0)
    check_results(p5i.parse(fileobj=buf))

    # ... when handed a plain file path ...
    buf.seek(0)
    tmp_fd, tmp_path = tempfile.mkstemp(dir=self.test_root)
    os.write(tmp_fd, buf.read())
    os.close(tmp_fd)
    check_results(p5i.parse(location=tmp_path))

    # ... and when handed a file:// URI.
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(os.path.abspath(tmp_path)), "", "", ""))
    check_results(p5i.parse(location=location))
    buf.close()
    buf = None

    # Verify that appropriate exceptions are raised for p5i
    # information that can't be retrieved (doesn't exist).
    nefpath = os.path.join(self.test_root, "non-existent")
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location="file://%s" % nefpath)
    self.assertRaises(api_errors.RetrievalError,
        p5i.parse, location=nefpath)

    # Verify that appropriate exceptions are raised for invalid
    # p5i information.
    lcpath = os.path.join(self.test_root, "libc.so.1")
    location = urlparse.urlunparse(("file", "",
        urllib.pathname2url(os.path.abspath(lcpath)), "", "", ""))

    # First, test as a file:// URI.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)

    # Last, test as a pathname.
    self.assertRaises(api_errors.InvalidP5IFile, p5i.parse,
        location=location)