Example #1
import posixpath
import unittest
from urllib.parse import urlparse

from distlib.locators import SimpleScrapingLocator


class ScrapingLocatorTest(unittest.TestCase):  # illustrative name for the enclosing test case

    def test_scraper(self):
        locator = SimpleScrapingLocator('https://pypi.org/simple/')
        # Project lookup should work regardless of name casing.
        for name in ('sarge', 'Sarge'):
            result = locator.get_project(name)
            self.assertIn('0.1', result)
            dist = result['0.1']
            self.assertEqual(dist.name, 'sarge')
            self.assertEqual(dist.version, '0.1')
            self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
            SARGE_HASHES = (
                ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'),
                ('sha256',
                 'ec2ec0b1c9ed9a77f9b4322c16e4954c93aa00d974a1af931b18eb751e377dfe'),
            )
            self.assertIn(dist.digest, SARGE_HASHES)
        # Test to check issue #112 fix: only wheels matching the configured
        # tags (plus the sdist) should be returned.
        locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                              ('cp35', 'cp35m', 'win32')]
        result = locator.get_project('simplejson')
        urls = result['urls'].get('3.16.0')
        self.assertTrue(urls)
        self.assertEqual(3, len(urls))
        expected = set([
            'simplejson-3.16.0-cp27-cp27m-win_amd64.whl',
            'simplejson-3.16.0-cp35-cp35m-win32.whl',
            'simplejson-3.16.0.tar.gz'
        ])
        for u in urls:
            p = posixpath.split(urlparse(u).path)[-1]
            self.assertIn(p, expected)
        return
        # The following check is skipped because it is too slow to run routinely.
        names = locator.get_distribution_names()
        self.assertGreater(len(names), 25000)
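
For context, here is a minimal sketch of how SimpleScrapingLocator might be used outside a test harness. It only reuses the calls seen in the test above (the constructor, wheel_tags, get_project and the Distribution attributes); the index URL and the printed results depend on what the package index serves when you run it.

# Minimal usage sketch (assumes distlib is installed; results depend on the live index).
from distlib.locators import SimpleScrapingLocator

locator = SimpleScrapingLocator('https://pypi.org/simple/')

# Optionally restrict which wheels are matched, as the test does for issue #112.
locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                      ('cp35', 'cp35m', 'win32')]

# get_project() returns a dict keyed by version, plus 'urls' and 'digests' entries.
result = locator.get_project('sarge')
for version, dist in sorted(result.items()):
    if version in ('urls', 'digests'):
        continue
    print(version, dist.name, dist.source_url, dist.digest)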