def test_read_metadata(self):
    """Round-trip a field mapping through write_file and re-parse it."""
    source = {
        'name': 'project',
        'version': '1.0',
        'description': 'desc',
        'summary': 'xxx',
        'download_url': 'http://example.com',
        'keywords': ['one', 'two'],
        'requires_dist': ['foo'],
    }
    buf = StringIO()
    LegacyMetadata(mapping=source).write_file(buf)
    buf.seek(0)
    metadata = LegacyMetadata(fileobj=buf)
    # Fields round-tripped from the mapping, plus defaults for unset ones.
    expectations = [
        ('name', 'project'),
        ('version', '1.0'),
        ('summary', 'xxx'),
        ('download_url', 'http://example.com'),
        ('keywords', ['one', 'two']),
        ('platform', []),
        ('obsoletes', []),
        ('requires-dist', ['foo']),
    ]
    for field, value in expectations:
        self.assertEqual(metadata[field], value)
def test_graph_bad_version_to_dot(self):
    """Edges must still render when the dist set includes bad-version eggs."""
    expected = (
        ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
        ('grammar', 'bacon', 'truffles (>=1.2)'),
        ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
        ('banana', 'strawberry', 'strawberry (>=0.5)'),
    )
    dists = self.get_dists(
        self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS, True)
    output = StringIO()
    make_graph(dists).to_dot(output)
    output.seek(0)
    # Drop the opening "digraph {" and closing "}" lines; strip() also
    # removes the trailing newline from each remaining line.
    found = []
    for raw in output.readlines()[1:-1]:
        edge = self.EDGE.match(raw.strip())
        self.assertIsNot(edge, None)
        found.append(edge.groups())
    self.checkLists(found, expected)
# NOTE(review): this method is an exact duplicate of the earlier
# test_graph_bad_version_to_dot definition — the later definition shadows
# the earlier one, so only one of them ever runs. Consider deleting one.
def test_graph_bad_version_to_dot(self):
    """Edges must still render when the dist set includes bad-version eggs."""
    expected = (
        ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
        ('grammar', 'bacon', 'truffles (>=1.2)'),
        ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
        ('banana', 'strawberry', 'strawberry (>=0.5)'),
    )
    all_dists = self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS
    graph = make_graph(self.get_dists(all_dists, True))
    sink = StringIO()
    graph.to_dot(sink)
    sink.seek(0)
    lines = sink.readlines()
    matches = []
    # Skip the first and last lines (the digraph braces); strip() removes
    # the trailing newline as well as surrounding whitespace.
    for line in lines[1:-1]:
        m = self.EDGE.match(line.strip())
        self.assertIsNot(m, None)
        matches.append(m.groups())
    self.checkLists(matches, expected)
def test_graph_disconnected_to_dot(self):
    """With skip_disconnected=False the dot output carries a 'disconnected'
    subgraph listing dists that have no edges; verify both sections."""
    dependencies_expected = (
        ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
        ('grammar', 'bacon', 'truffles (>=1.2)'),
        ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
        ('banana', 'strawberry', 'strawberry (>=0.5)'),
    )
    disconnected_expected = ('cheese', 'bacon', 'strawberry')
    dists = self.get_dists(self.DISTROS_DIST + self.DISTROS_EGG, True)
    buf = StringIO()
    make_graph(dists).to_dot(buf, skip_disconnected=False)
    buf.seek(0)
    lines = buf.readlines()
    dep_lines = []
    disc_lines = []
    # Sort output lines into dependency edges vs. disconnected entries,
    # skipping attribute lines and the outer "{" / "}" delimiters.
    in_disconnected = False
    for line in lines[1:-1]:  # skip first and last line
        if line.startswith('subgraph disconnected'):
            in_disconnected = True
            continue
        if line.startswith('}') and in_disconnected:
            in_disconnected = False
            continue
        if in_disconnected:
            # Ignore 'label = "Disconnected"' and similar attribute lines.
            if ' = ' not in line:
                disc_lines.append(line)
        else:
            dep_lines.append(line)
    dep_matches = []
    for line in dep_lines:
        m = self.EDGE.match(line.strip())
        self.assertIsNot(m, None)
        dep_matches.append(m.groups())
    disc_matches = []
    for line in disc_lines:
        if line[-1] == '\n':
            line = line[:-1]
        disc_matches.append(line.strip('"'))
    self.checkLists(dep_matches, dependencies_expected)
    self.checkLists(disc_matches, disconnected_expected)
def test_metadata_read_write(self):
    """write_file followed by read_file must preserve every field value."""
    source = LegacyMetadata(os.path.join(HERE, 'PKG-INFO'))
    buf = StringIO()
    source.write_file(buf)
    buf.seek(0)
    restored = LegacyMetadata()
    restored.read_file(buf)
    self.assertEqual(source.values(), restored.values())
def test_project_url(self):
    """Project-URL pairs must round-trip through setting, writing, reading."""
    metadata = LegacyMetadata()
    metadata['Project-URL'] = [('one', 'http://ok')]
    self.assertEqual(metadata['Project-URL'], [('one', 'http://ok')])
    metadata.set_metadata_version()
    self.assertEqual(metadata['Metadata-Version'], '1.2')
    # make sure this particular field is handled properly when written
    buf = StringIO()
    metadata.write_file(buf)
    self.assertIn('Project-URL: one,http://ok', buf.getvalue().split('\n'))
    buf.seek(0)
    reread = LegacyMetadata()
    reread.read_file(buf)
    self.assertEqual(reread['Project-Url'], [('one', 'http://ok')])
def test_description(self):
    """The Description field survives a PKG-INFO read and a write/read
    round-trip.

    Fixes over the original: the duplicated ``out.seek(0)`` statement is
    removed (it was called twice back-to-back), and the manual
    ``try/finally`` file close is replaced with a ``with`` block.
    """
    content = self.get_file_contents('PKG-INFO')
    metadata = LegacyMetadata()
    metadata.read_file(StringIO(content))
    # see if we can read the description now
    DESC = os.path.join(HERE, 'LONG_DESC.txt')
    with open(DESC) as f:
        wanted = f.read()
    self.assertEqual(wanted, metadata['Description'])
    # save the file somewhere and make sure we can read it back
    out = StringIO()
    metadata.write_file(out)
    out.seek(0)
    metadata = LegacyMetadata()
    metadata.read_file(out)
    self.assertEqual(wanted, metadata['Description'])
# NOTE(review): this method is an exact duplicate of the earlier
# test_read_metadata definition — the later definition shadows the
# earlier one, so only one of them ever runs. Consider deleting one.
def test_read_metadata(self):
    """Round-trip a field mapping through write_file and re-parse it."""
    fields = {'name': 'project',
              'version': '1.0',
              'description': 'desc',
              'summary': 'xxx',
              'download_url': 'http://example.com',
              'keywords': ['one', 'two'],
              'requires_dist': ['foo']}
    written = StringIO()
    LegacyMetadata(mapping=fields).write_file(written)
    written.seek(0)
    metadata = LegacyMetadata(fileobj=written)
    self.assertEqual(metadata['name'], 'project')
    self.assertEqual(metadata['version'], '1.0')
    self.assertEqual(metadata['summary'], 'xxx')
    self.assertEqual(metadata['download_url'], 'http://example.com')
    self.assertEqual(metadata['keywords'], ['one', 'two'])
    # Unset fields come back as empty lists.
    self.assertEqual(metadata['platform'], [])
    self.assertEqual(metadata['obsoletes'], [])
    self.assertEqual(metadata['requires-dist'], ['foo'])
def test_graph_to_dot(self):
    """Dot output for a connected dist graph contains exactly the
    expected dependency edges."""
    expected = (
        ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
        ('grammar', 'bacon', 'truffles (>=1.2)'),
        ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
        ('banana', 'strawberry', 'strawberry (>=0.5)'),
    )
    dists = self.get_dists(self.DISTROS_DIST + self.DISTROS_EGG, True)
    graph = make_graph(dists)
    rendered = StringIO()
    graph.to_dot(rendered)
    rendered.seek(0)
    # Skip the "digraph {" and "}" lines; strip() also drops the newline.
    interior = rendered.readlines()[1:-1]
    edges = []
    for raw in interior:
        match = self.EDGE.match(raw.strip())
        self.assertIsNot(match, None)
        edges.append(match.groups())
    self.checkLists(edges, expected)