Example #1
 def test_aggregation(self):
     d = os.path.join(HERE, "fake_archives")
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator("https://pypi.python.org/simple/", timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, "fake_archives", "subdir", "subsubdir", "Flask-0.9.tar.gz")
     exp2 = "/Flask-0.9.tar.gz"
     result = locator.get_project("flask")
     self.assertEqual(len(result), 3)
     self.assertIn("0.9", result)
     dist = result["0.9"]
     self.assertEqual(dist.name, "Flask")
     self.assertEqual(dist.version, "0.9")
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, "file")
     self.assertEqual(os.path.normcase(url2pathname(path)), os.path.normcase(exp1))
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project("flask")
     self.assertGreater(len(result), 3)
     self.assertIn("0.9", result)
     dist = result["0.9"]
     self.assertEqual(dist.name, "Flask")
     self.assertEqual(dist.version, "0.9")
     self.assertTrue(dist.source_url.endswith(exp2))
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
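
The examples above exercise distlib's AggregatingLocator from inside a unittest class, using a fake_archives fixture directory. A minimal standalone sketch of the same idea, with the local directory path assumed rather than taken from a fixture, might look like this:

 # Standalone sketch (not from the test suite), assuming distlib is installed
 # and that 'fake_archives' is a hypothetical local directory of source
 # archives such as Flask-0.9.tar.gz.
 from distlib.locators import (AggregatingLocator, DirectoryLocator,
                               SimpleScrapingLocator)

 local = DirectoryLocator('fake_archives')                  # hypothetical path
 remote = SimpleScrapingLocator('https://pypi.org/simple/', timeout=5.0)

 # merge=False (the default) stops at the first locator that finds the
 # project; merge=True combines results from all locators, which is what the
 # tests above toggle via locator.merge = True.
 locator = AggregatingLocator(local, remote, merge=True)

 result = locator.get_project('flask')
 for key, dist in sorted(result.items()):
     # recent distlib versions also add 'urls' and 'digests' bookkeeping
     # entries to the result dict, alongside the version keys
     if key not in ('urls', 'digests'):
         print(key, dist.source_url)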
Example #2
 def test_aggregation(self):
     d = os.path.join(HERE, 'fake_archives')
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                  timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, 'fake_archives', 'subdir',
                         'subsubdir', 'Flask-0.9.tar.gz')
     exp2 = 'https://pypi.python.org/packages/source/F/Flask/Flask-0.9.tar.gz'
     result = locator.get_project('flask')
     self.assertEqual(len(result), 1)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, 'file')
     self.assertEqual(os.path.normcase(url2pathname(path)),
                      os.path.normcase(exp1))
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project('flask')
     self.assertGreater(len(result), 1)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     self.assertEqual(dist.source_url, exp2)
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
Example #3
 def test_aggregation(self):
     d = os.path.join(HERE, 'fake_archives')
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                  timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, 'fake_archives', 'subdir', 'subsubdir',
                         'Flask-0.9.tar.gz')
     exp2 = '/Flask-0.9.tar.gz'
     result = locator.get_project('flask')
     self.assertEqual(len(result), 3)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, 'file')
     self.assertEqual(os.path.normcase(url2pathname(path)),
                      os.path.normcase(exp1))
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project('flask')
     self.assertGreater(len(result), 3)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     self.assertTrue(dist.source_url.endswith(exp2))
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
Example #4
 def test_scraper(self):
     locator = SimpleScrapingLocator('https://pypi.org/simple/')
     for name in ('sarge', 'Sarge'):
         result = locator.get_project(name)
         self.assertIn('0.1', result)
         dist = result['0.1']
         self.assertEqual(dist.name, 'sarge')
         self.assertEqual(dist.version, '0.1')
         self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
         SARGE_HASHES = (
             ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'),
             ('sha256',
              'ec2ec0b1c9ed9a77f9b4322c16e4954c93aa00d974a1af931b18eb751e377dfe'
              ),
         )
         self.assertIn(dist.digest, SARGE_HASHES)
     # Test to check issue #112 fix.
     locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                           ('cp35', 'cp35m', 'win32')]
     result = locator.get_project('simplejson')
     urls = result['urls'].get('3.16.0')
     self.assertTrue(urls)
     self.assertEqual(3, len(urls))
     expected = set([
         'simplejson-3.16.0-cp27-cp27m-win_amd64.whl',
         'simplejson-3.16.0-cp35-cp35m-win32.whl',
         'simplejson-3.16.0.tar.gz'
     ])
     for u in urls:
         p = posixpath.split(urlparse(u).path)[-1]
         self.assertIn(p, expected)
     return
     # The following is too slow
     names = locator.get_distribution_names()
     self.assertGreater(len(names), 25000)
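
Example #4 also covers the issue #112 fix, where setting wheel_tags on a locator restricts which binary wheels it reports alongside the sdist. A hedged standalone sketch of that filtering, reusing the same (python, abi, platform) tag triples as the test purely for illustration:

 # Sketch only: the tag triples below mirror the test above and are
 # illustrative, not a recommendation for real environments.
 from distlib.locators import SimpleScrapingLocator

 locator = SimpleScrapingLocator('https://pypi.org/simple/')
 # restrict reported wheels to these (python tag, abi tag, platform tag) triples
 locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                       ('cp35', 'cp35m', 'win32')]

 result = locator.get_project('simplejson')
 for version, links in sorted(result.get('urls', {}).items()):
     for link in links:
         print(version, link)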
Example #5
 def test_scraper(self):
     locator = SimpleScrapingLocator("https://pypi.python.org/simple/")
     for name in ("sarge", "Sarge"):
         result = locator.get_project(name)
         self.assertIn("0.1", result)
         dist = result["0.1"]
         self.assertEqual(dist.name, "sarge")
         self.assertEqual(dist.version, "0.1")
         self.assertTrue(dist.source_url.endswith("/sarge-0.1.tar.gz"))
         self.assertEqual(dist.digest, ("md5", "961ddd9bc085fdd8b248c6dd96ceb1c8"))
     return
     # The following is too slow
     names = locator.get_distribution_names()
     self.assertGreater(len(names), 25000)
Example #6
 def test_scraper(self):
     locator = SimpleScrapingLocator('https://pypi.python.org/simple/')
     for name in ('sarge', 'Sarge'):
         result = locator.get_project(name)
         self.assertIn('0.1', result)
         dist = result['0.1']
         self.assertEqual(dist.name, 'sarge')
         self.assertEqual(dist.version, '0.1')
         self.assertTrue(dist.source_url.endswith('/sarge-0.1.tar.gz'))
         self.assertEqual(dist.digest,
                          ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'))
     return
     # The following is too slow
     names = locator.get_distribution_names()
     self.assertGreater(len(names), 25000)
Example #7
 def test_scraper(self):
     locator = SimpleScrapingLocator('https://pypi.python.org/simple/')
     for name in ('sarge', 'Sarge'):
         result = locator.get_project(name)
         self.assertIn('0.1', result)
         dist = result['0.1']
         self.assertEqual(dist.name, 'sarge')
         self.assertEqual(dist.version, '0.1')
         self.assertEqual(dist.source_url,
                          'https://pypi.python.org/packages/source/s/sarge/'
                          'sarge-0.1.tar.gz')
         self.assertEqual(dist.digest,
                          ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'))
     return
     # The following is too slow
     names = locator.get_distribution_names()
     self.assertGreater(len(names), 25000)
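
The digest assertions in Examples #5 to #7 compare dist.digest against an (algorithm, hexdigest) pair. When only a single matching distribution is needed, distlib also exposes a module-level locate() helper built on an AggregatingLocator; a short sketch, assuming that helper behaves as in current distlib releases:

 # Sketch, assuming distlib's module-level locate() convenience wrapper.
 from distlib.locators import locate

 dist = locate('sarge==0.1')       # returns None if nothing matches
 if dist is not None:
     print(dist.name, dist.version, dist.source_url)
     # digest is an (algorithm, hexdigest) tuple when the index provides one,
     # e.g. ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8') in the tests above
     print(dist.digest)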