Example #1
 def test_aggregation(self):
     # Aggregate a local directory of fake archives with a scraper for the
     # PyPI "simple" index.
     d = os.path.join(HERE, "fake_archives")
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator("https://pypi.python.org/simple/", timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, "fake_archives", "subdir", "subsubdir", "Flask-0.9.tar.gz")
     exp2 = "/Flask-0.9.tar.gz"
     result = locator.get_project("flask")
     self.assertEqual(len(result), 3)
     self.assertIn("0.9", result)
     dist = result["0.9"]
     self.assertEqual(dist.name, "Flask")
     self.assertEqual(dist.version, "0.9")
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, "file")
     self.assertEqual(os.path.normcase(url2pathname(path)), os.path.normcase(exp1))
     # With merge enabled, results from all locators are combined rather than
     # taking only the first locator that returns a match.
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project("flask")
     self.assertGreater(len(result), 3)
     self.assertIn("0.9", result)
     dist = result["0.9"]
     self.assertEqual(dist.name, "Flask")
     self.assertEqual(dist.version, "0.9")
     self.assertTrue(dist.source_url.endswith(exp2))
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
Example #2
 def test_aggregation(self):
     d = os.path.join(HERE, 'fake_archives')
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                  timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, 'fake_archives', 'subdir', 'subsubdir',
                         'Flask-0.9.tar.gz')
     exp2 = '/Flask-0.9.tar.gz'
     result = locator.get_project('flask')
     self.assertEqual(len(result), 3)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, 'file')
     self.assertEqual(os.path.normcase(url2pathname(path)),
                      os.path.normcase(exp1))
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project('flask')
     self.assertGreater(len(result), 3)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     self.assertTrue(dist.source_url.endswith(exp2))
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
Example #3
 def test_aggregation(self):
     d = os.path.join(HERE, 'fake_archives')
     loc1 = DirectoryLocator(d)
     loc2 = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                  timeout=5.0)
     locator = AggregatingLocator(loc1, loc2)
     exp1 = os.path.join(HERE, 'fake_archives', 'subdir',
                         'subsubdir', 'Flask-0.9.tar.gz')
     exp2 = 'https://pypi.python.org/packages/source/F/Flask/Flask-0.9.tar.gz'
     result = locator.get_project('flask')
     self.assertEqual(len(result), 1)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     scheme, _, path, _, _, _ = urlparse(dist.source_url)
     self.assertEqual(scheme, 'file')
     self.assertEqual(os.path.normcase(url2pathname(path)),
                      os.path.normcase(exp1))
     locator.merge = True
     locator._cache.clear()
     result = locator.get_project('flask')
     self.assertGreater(len(result), 1)
     self.assertIn('0.9', result)
     dist = result['0.9']
     self.assertEqual(dist.name, 'Flask')
     self.assertEqual(dist.version, '0.9')
     self.assertEqual(dist.source_url, exp2)
     return
     # The following code is slow because it has
     # to get all the dist names by scraping :-(
     n1 = loc1.get_distribution_names()
     n2 = loc2.get_distribution_names()
     self.assertEqual(locator.get_distribution_names(), n1 | n2)
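The test_aggregation snippets above are shown without their surrounding test
module. A minimal, hedged reconstruction of the names they rely on, assuming
they come from distlib's locator API and the standard library (the original
module may arrange these imports differently), could look like this:

 import os
 from urllib.parse import urlparse
 from urllib.request import url2pathname

 # DirectoryLocator, SimpleScrapingLocator and AggregatingLocator are part of
 # distlib's public locator API.
 from distlib.locators import (AggregatingLocator, DirectoryLocator,
                               SimpleScrapingLocator)

 # Assumption: HERE is the directory of the test module, so that
 # "fake_archives" resolves to a fixture directory next to it.
 HERE = os.path.dirname(os.path.abspath(__file__))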
Example #4
 def get_path(url):
     # Turn a URL (e.g. a file:// URL) into a local filesystem path.
     t = urlparse(url)
     return url2pathname(t.path)
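For reference, a small usage sketch of the helper above; the file URL is
invented for illustration, and url2pathname's exact output is
platform-dependent:

 from urllib.parse import urlparse
 from urllib.request import url2pathname

 def get_path(url):
     t = urlparse(url)
     return url2pathname(t.path)

 # On a POSIX system this prints '/tmp/Flask-0.9.tar.gz'; on Windows,
 # url2pathname applies drive-letter and backslash handling instead.
 print(get_path('file:///tmp/Flask-0.9.tar.gz'))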