def test_scraper(self):
    """Scrape the live PyPI simple index and verify located metadata.

    Checks that project lookup is case-insensitive, that the located
    sarge 0.1 distribution has the expected filename and one of the
    known digests, and (regression for issue #112) that setting
    ``wheel_tags`` filters the simplejson 3.16.0 URLs down to exactly
    the two matching wheels plus the sdist.

    NOTE: requires network access to pypi.org.
    """
    locator = SimpleScrapingLocator('https://pypi.org/simple/')
    # The index may report either digest depending on mirror/metadata age,
    # so accept either one.  Loop-invariant, so defined once up front.
    SARGE_HASHES = (
        ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'),
        ('sha256',
         'ec2ec0b1c9ed9a77f9b4322c16e4954c93aa00d974a1af931b18eb751e377dfe'),
    )
    # Lookup should be case-insensitive (PEP 503 name normalisation).
    for name in ('sarge', 'Sarge'):
        result = locator.get_project(name)
        self.assertIn('0.1', result)
        dist = result['0.1']
        self.assertEqual(dist.name, 'sarge')
        self.assertEqual(dist.version, '0.1')
        self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
        self.assertIn(dist.digest, SARGE_HASHES)
    # Test to check issue #112 fix: with wheel_tags set, only wheels
    # matching those tags (plus the sdist) should be returned.
    locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                          ('cp35', 'cp35m', 'win32')]
    result = locator.get_project('simplejson')
    urls = result['urls'].get('3.16.0')
    self.assertTrue(urls)
    self.assertEqual(3, len(urls))
    expected = {
        'simplejson-3.16.0-cp27-cp27m-win_amd64.whl',
        'simplejson-3.16.0-cp35-cp35m-win32.whl',
        'simplejson-3.16.0.tar.gz',
    }
    for u in urls:
        p = posixpath.split(urlparse(u).path)[-1]
        self.assertIn(p, expected)
    return  # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def test_unicode_project_name(self):
    # Just checking to see that no exceptions are raised: a project
    # name that cannot exist on PyPI should yield a falsy result from
    # both locator implementations.
    NAME = '\u2603'
    locators = (
        SimpleScrapingLocator('https://pypi.python.org/simple/'),
        PyPIJSONLocator('https://pypi.python.org/pypi/'),
    )
    for locator in locators:
        result = locator.get_project(NAME)
        self.assertFalse(result)
def test_unicode_project_name(self):
    # Just checking to see that no exceptions are raised: an
    # unlocatable Unicode name should produce the canonical empty
    # result from both locator implementations.
    NAME = "\u2603"
    expected = {"urls": {}, "digests": {}}
    for locator in (
        SimpleScrapingLocator("https://pypi.python.org/simple/"),
        PyPIJSONLocator("https://pypi.python.org/pypi/"),
    ):
        result = locator.get_project(NAME)
        self.assertEqual(result, expected)
def test_unicode_project_name(self):
    # Just checking to see that no exceptions are raised: both locator
    # kinds should return the empty-project result for a name that
    # cannot exist on the index.
    NAME = '\u2603'
    expected = {'urls': {}, 'digests': {}}
    cases = (
        (SimpleScrapingLocator, 'https://pypi.python.org/simple/'),
        (PyPIJSONLocator, 'https://pypi.python.org/pypi/'),
    )
    for factory, url in cases:
        result = factory(url).get_project(NAME)
        self.assertEqual(result, expected)
def test_scraper(self):
    # Lookup on the simple index should be case-insensitive and yield
    # the sarge 0.1 distribution with its expected URL and MD5 digest.
    # NOTE: requires network access.
    locator = SimpleScrapingLocator("https://pypi.python.org/simple/")

    def check_dist(dist):
        # One bundle of assertions for the located distribution.
        self.assertEqual(dist.name, "sarge")
        self.assertEqual(dist.version, "0.1")
        self.assertTrue(dist.source_url.endswith("/sarge-0.1.tar.gz"))
        self.assertEqual(dist.digest,
                         ("md5", "961ddd9bc085fdd8b248c6dd96ceb1c8"))

    for name in ("sarge", "Sarge"):
        result = locator.get_project(name)
        self.assertIn("0.1", result)
        check_dist(result["0.1"])
    return  # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def test_scraper(self):
    # Both spellings of the project name must resolve to the same
    # sarge 0.1 distribution with the expected URL and digest.
    # NOTE: requires network access.
    locator = SimpleScrapingLocator('https://pypi.python.org/simple/')
    expected_digest = ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8')
    for name in ('sarge', 'Sarge'):
        versions = locator.get_project(name)
        self.assertIn('0.1', versions)
        dist = versions['0.1']
        self.assertEqual(dist.name, 'sarge')
        self.assertEqual(dist.version, '0.1')
        self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
        self.assertEqual(dist.digest, expected_digest)
    return  # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def test_scraper(self):
    # Case-insensitive lookup should yield sarge 0.1 with its canonical
    # download URL and MD5 digest.  NOTE: requires network access.
    locator = SimpleScrapingLocator('https://pypi.python.org/simple/')
    expected_url = ('https://pypi.python.org/packages/source/s/sarge/'
                    'sarge-0.1.tar.gz')
    expected_digest = ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8')
    for name in ('sarge', 'Sarge'):
        project = locator.get_project(name)
        self.assertIn('0.1', project)
        dist = project['0.1']
        self.assertEqual(dist.name, 'sarge')
        self.assertEqual(dist.version, '0.1')
        self.assertEqual(dist.source_url, expected_url)
        self.assertEqual(dist.digest, expected_digest)
    return  # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)