def test_aggregation(self):
    """Verify AggregatingLocator: first-match by default, combined results when merging."""
    archive_dir = os.path.join(HERE, 'fake_archives')
    dir_locator = DirectoryLocator(archive_dir)
    web_locator = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                        timeout=5.0)
    locator = AggregatingLocator(dir_locator, web_locator)
    expected_path = os.path.join(HERE, 'fake_archives', 'subdir',
                                 'subsubdir', 'Flask-0.9.tar.gz')
    expected_suffix = '/Flask-0.9.tar.gz'
    # Without merging, only the first (directory) locator's results show up.
    result = locator.get_project('flask')
    self.assertEqual(len(result), 3)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    scheme, _, path, _, _, _ = urlparse(dist.source_url)
    self.assertEqual(scheme, 'file')
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(expected_path))
    # With merging enabled (and the cache cleared), results from both
    # locators are combined.
    locator.merge = True
    locator._cache.clear()
    result = locator.get_project('flask')
    self.assertGreater(len(result), 3)
    self.assertIn('0.9', result)
    dist = result['0.9']
    self.assertEqual(dist.name, 'Flask')
    self.assertEqual(dist.version, '0.9')
    self.assertTrue(dist.source_url.endswith(expected_suffix))
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = dir_locator.get_distribution_names()
    n2 = web_locator.get_distribution_names()
    self.assertEqual(locator.get_distribution_names(), n1 | n2)
def test_aggregation(self):
    """Check AggregatingLocator first-match and merge behaviours."""
    base = os.path.join(HERE, "fake_archives")
    local_loc = DirectoryLocator(base)
    remote_loc = SimpleScrapingLocator("https://pypi.python.org/simple/",
                                       timeout=5.0)
    agg = AggregatingLocator(local_loc, remote_loc)
    tarball_path = os.path.join(HERE, "fake_archives", "subdir",
                                "subsubdir", "Flask-0.9.tar.gz")
    url_suffix = "/Flask-0.9.tar.gz"
    # Default (no merge): only the directory locator's hits come back.
    project = agg.get_project("flask")
    self.assertEqual(len(project), 3)
    self.assertIn("0.9", project)
    flask09 = project["0.9"]
    self.assertEqual(flask09.name, "Flask")
    self.assertEqual(flask09.version, "0.9")
    scheme, _, path, _, _, _ = urlparse(flask09.source_url)
    self.assertEqual(scheme, "file")
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(tarball_path))
    # Merge mode: hits from both locators are combined.
    agg.merge = True
    agg._cache.clear()
    project = agg.get_project("flask")
    self.assertGreater(len(project), 3)
    self.assertIn("0.9", project)
    flask09 = project["0.9"]
    self.assertEqual(flask09.name, "Flask")
    self.assertEqual(flask09.version, "0.9")
    self.assertTrue(flask09.source_url.endswith(url_suffix))
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = local_loc.get_distribution_names()
    n2 = remote_loc.get_distribution_names()
    self.assertEqual(agg.get_distribution_names(), n1 | n2)
def test_aggregation(self):
    """Exercise AggregatingLocator; merged results should expose the PyPI URL."""
    fixtures = os.path.join(HERE, 'fake_archives')
    first = DirectoryLocator(fixtures)
    second = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                   timeout=5.0)
    combined = AggregatingLocator(first, second)
    local_archive = os.path.join(HERE, 'fake_archives', 'subdir',
                                 'subsubdir', 'Flask-0.9.tar.gz')
    remote_url = 'https://pypi.python.org/packages/source/F/Flask/Flask-0.9.tar.gz'
    # Unmerged: the single local archive is the only result.
    found = combined.get_project('flask')
    self.assertEqual(len(found), 1)
    self.assertIn('0.9', found)
    entry = found['0.9']
    self.assertEqual(entry.name, 'Flask')
    self.assertEqual(entry.version, '0.9')
    scheme, _, path, _, _, _ = urlparse(entry.source_url)
    self.assertEqual(scheme, 'file')
    self.assertEqual(os.path.normcase(url2pathname(path)),
                     os.path.normcase(local_archive))
    # Merged: the remote locator contributes too, and the remote source
    # URL wins for 0.9.
    combined.merge = True
    combined._cache.clear()
    found = combined.get_project('flask')
    self.assertGreater(len(found), 1)
    self.assertIn('0.9', found)
    entry = found['0.9']
    self.assertEqual(entry.name, 'Flask')
    self.assertEqual(entry.version, '0.9')
    self.assertEqual(entry.source_url, remote_url)
    return
    # The following code is slow because it has
    # to get all the dist names by scraping :-(
    n1 = first.get_distribution_names()
    n2 = second.get_distribution_names()
    self.assertEqual(combined.get_distribution_names(), n1 | n2)
def test_unicode_project_name(self):
    """A non-ASCII project name must not raise; it should simply find nothing."""
    snowman = '\u2603'
    # Same expectation for both locator implementations.
    for locator in (SimpleScrapingLocator('https://pypi.python.org/simple/'),
                    PyPIJSONLocator('https://pypi.python.org/pypi/')):
        self.assertFalse(locator.get_project(snowman))
def test_unicode_project_name(self):
    """A non-ASCII project name must not raise; result should be the empty shape."""
    snowman = "\u2603"
    empty = {"urls": {}, "digests": {}}
    scraper = SimpleScrapingLocator("https://pypi.python.org/simple/")
    self.assertEqual(scraper.get_project(snowman), empty)
    json_locator = PyPIJSONLocator("https://pypi.python.org/pypi/")
    self.assertEqual(json_locator.get_project(snowman), empty)
def test_unicode_project_name(self):
    """Non-ASCII project names should yield an empty result, not an exception."""
    snowman = '\u2603'
    empty = {'urls': {}, 'digests': {}}
    # Both locator types must behave identically here.
    for locator in (SimpleScrapingLocator('https://pypi.python.org/simple/'),
                    PyPIJSONLocator('https://pypi.python.org/pypi/')):
        self.assertEqual(locator.get_project(snowman), empty)
def test_scraper(self):
    """Scraping locator finds sarge 0.1 regardless of requested-name case."""
    scraper = SimpleScrapingLocator("https://pypi.python.org/simple/")
    for query in ("sarge", "Sarge"):
        project = scraper.get_project(query)
        self.assertIn("0.1", project)
        found = project["0.1"]
        self.assertEqual(found.name, "sarge")
        self.assertEqual(found.version, "0.1")
        self.assertTrue(found.source_url.endswith("/sarge-0.1.tar.gz"))
        self.assertEqual(found.digest,
                         ("md5", "961ddd9bc085fdd8b248c6dd96ceb1c8"))
    return
    # The following is too slow
    names = scraper.get_distribution_names()
    self.assertGreater(len(names), 25000)
def test_scraper(self):
    """The scraper should match 'sarge' case-insensitively and report its digest."""
    locator = SimpleScrapingLocator('https://pypi.python.org/simple/')
    for spelling in ('sarge', 'Sarge'):
        hits = locator.get_project(spelling)
        self.assertIn('0.1', hits)
        sarge01 = hits['0.1']
        self.assertEqual(sarge01.name, 'sarge')
        self.assertEqual(sarge01.version, '0.1')
        self.assertTrue(sarge01.source_url.endswith('/sarge-0.1.tar.gz'))
        self.assertEqual(sarge01.digest,
                         ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'))
    return
    # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def test_prereleases(self):
    """Prereleases are excluded by default but used as a last resort."""
    locator = AggregatingLocator(
        JSONLocator(),
        SimpleScrapingLocator('https://pypi.python.org/simple/', timeout=3.0),
        scheme='legacy')
    REQT = 'SQLAlchemy (>0.5.8, < 0.6)'
    finder = DependencyFinder(locator)
    # Only a prerelease satisfies this range, so plain locate finds nothing.
    self.assertIsNone(locator.locate(REQT))
    prerelease = locator.locate(REQT, True)
    self.assertIsNotNone(prerelease)
    self.assertEqual(prerelease.name_and_version, 'SQLAlchemy (0.6beta3)')
    dist = make_dist('dummy', '0.1')
    dist.metadata.run_requires = [{'requires': [REQT]}]
    dists, problems = finder.find(dist, prereleases=True)
    self.assertFalse(problems)
    ordered = sorted(dists, key=lambda o: o.name_and_version)
    self.assertEqual(ordered[0].name_and_version, 'SQLAlchemy (0.6beta3)')
    dists, problems = finder.find(dist)
    # Test changed since now prereleases as found as a last resort.
    #self.assertEqual(dists, set([dist]))
    #self.assertEqual(len(problems), 1)
    #problem = problems.pop()
    #self.assertEqual(problem, ('unsatisfied', REQT))
    self.assertEqual(dists, {ordered[0], dist})
    self.assertFalse(problems)
def test_scraper(self):
    """Scraper resolves 'sarge' (any case) to the exact PyPI source URL."""
    scraper = SimpleScrapingLocator('https://pypi.python.org/simple/')
    expected_url = ('https://pypi.python.org/packages/source/s/sarge/'
                    'sarge-0.1.tar.gz')
    for requested in ('sarge', 'Sarge'):
        project = scraper.get_project(requested)
        self.assertIn('0.1', project)
        release = project['0.1']
        self.assertEqual(release.name, 'sarge')
        self.assertEqual(release.version, '0.1')
        self.assertEqual(release.source_url, expected_url)
        self.assertEqual(release.digest,
                         ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'))
    return
    # The following is too slow
    names = scraper.get_distribution_names()
    self.assertGreater(len(names), 25000)
def locate(simple_url, pkg, dest, work):
    """Locate *pkg* on the index at *simple_url*, download its wheel into
    the *work* directory and install it with every install path set to
    *dest*.

    :param simple_url: URL of a PEP 503 'simple' index to scrape.
    :param pkg: requirement string for the package to locate.
    :param dest: directory used for every wheel install path.
    :param work: scratch directory where the wheel is downloaded.
    :raises ValueError: if the package cannot be located on the index.
    """
    locator = SimpleScrapingLocator(simple_url)
    dist = locator.locate(pkg)
    # Fail loudly instead of raising an opaque AttributeError below.
    if dist is None:
        raise ValueError('unable to locate %r via %s' % (pkg, simple_url))
    url = list(dist.download_urls)[0]
    fname = url.split('/')[-1]
    archive = os.path.join(work, fname)
    # Stream the download in chunks and always close the response —
    # the original read the whole body into memory and leaked the socket.
    download = request.urlopen(url)
    try:
        with open(archive, 'wb') as f:
            while True:
                chunk = download.read(65536)
                if not chunk:
                    break
                f.write(chunk)
    finally:
        download.close()
    wheel = Wheel(archive)
    print(wheel.name)
    # All install categories point at the same destination directory.
    paths = {
        'purelib': dest,
        'platlib': dest,
        'prefix': dest,
        'headers': dest,
        'scripts': dest,
        'data': dest,
    }
    maker = ScriptMaker(None, None)
    wheel.install(paths, maker)
def test_scraper(self):
    """Scraper finds sarge (any digest PyPI serves) and honours wheel_tags."""
    locator = SimpleScrapingLocator('https://pypi.org/simple/')
    known_digests = (
        ('md5', '961ddd9bc085fdd8b248c6dd96ceb1c8'),
        ('sha256',
         'ec2ec0b1c9ed9a77f9b4322c16e4954c93aa00d974a1af931b18eb751e377dfe'),
    )
    for spelling in ('sarge', 'Sarge'):
        hits = locator.get_project(spelling)
        self.assertIn('0.1', hits)
        release = hits['0.1']
        self.assertEqual(release.name, 'sarge')
        self.assertEqual(release.version, '0.1')
        self.assertTrue(release.source_url.endswith('/sarge-0.1.tar.gz'))
        self.assertIn(release.digest, known_digests)
    # Test to check issue #112 fix.
    locator.wheel_tags = [('cp27', 'cp27m', 'win_amd64'),
                          ('cp35', 'cp35m', 'win32')]
    result = locator.get_project('simplejson')
    urls = result['urls'].get('3.16.0')
    self.assertTrue(urls)
    self.assertEqual(3, len(urls))
    expected = {
        'simplejson-3.16.0-cp27-cp27m-win_amd64.whl',
        'simplejson-3.16.0-cp35-cp35m-win32.whl',
        'simplejson-3.16.0.tar.gz',
    }
    for link in urls:
        filename = posixpath.split(urlparse(link).path)[-1]
        self.assertIn(filename, expected)
    return
    # The following is too slow
    names = locator.get_distribution_names()
    self.assertGreater(len(names), 25000)
def main(args=None):
    """Command-line entry point: build wheels for the given requirements.

    Parses options, verifies pip is available, optionally resolves the
    full dependency closure of each requirement, skips anything already
    installed, then builds a wheel for each remaining requirement.
    Returns 1 if pip is unavailable, otherwise None.
    """
    parser = optparse.OptionParser(usage='%prog [options] requirement [requirement ...]')
    parser.add_option('-d', '--dest', dest='destdir', metavar='DESTDIR',
                      default=os.getcwd(), help='Where you want the wheels '
                      'to be put.')
    parser.add_option('-n', '--no-deps', dest='deps', default=True,
                      action='store_false',
                      help='Don\'t build dependent wheels.')
    options, args = parser.parse_args(args)
    options.compatible = True   # may add flag to turn off later
    if not args:
        parser.print_usage()
    else:
        # Check if pip is available; no point in continuing, otherwise
        try:
            with open(os.devnull, 'w') as f:
                p = subprocess.call(['pip', '--version'], stdout=f,
                                    stderr=subprocess.STDOUT)
        except Exception:
            # Any failure to even launch pip counts as "not available".
            p = 1
        if p:
            print('pip appears not to be available. Wheeler needs pip to '
                  'build wheels.')
            return 1
        if options.deps:
            # collect all the requirements, including dependencies
            u = 'http://pypi.python.org/simple/'
            locator = AggregatingLocator(JSONLocator(),
                                         SimpleScrapingLocator(u, timeout=3.0),
                                         scheme='legacy')
            finder = DependencyFinder(locator)
            wanted = set()
            for arg in args:
                r = parse_requirement(arg)
                if not r.constraints:
                    dname = r.name
                else:
                    dname = '%s (%s)' % (r.name, ', '.join(r.constraints))
                print('Finding the dependencies of %s ...' % arg)
                dists, problems = finder.find(dname)
                if problems:
                    # Report but continue: best-effort resolution.
                    print('There were some problems resolving dependencies '
                          'for %r.' % arg)
                    for _, info in problems:
                        print(' Unsatisfied requirement %r' % info)
                wanted |= dists
            want_ordered = True     # set to False to skip ordering
            if not want_ordered:
                wanted = list(wanted)
            else:
                # Topologically sort so dependencies are built first.
                graph = make_graph(wanted, scheme=locator.scheme)
                slist, cycle = graph.topological_sort()
                if cycle:
                    # Now sort the remainder on dependency count.
                    cycle = sorted(cycle, reverse=True,
                                   key=lambda d: len(graph.reverse_list[d]))
                wanted = slist + cycle

            # get rid of any installed distributions from the list
            for w in list(wanted):
                dist = INSTALLED_DISTS.get_distribution(w.name)
                if dist or w.name in ('setuptools', 'distribute'):
                    wanted.remove(w)
                    s = w.name_and_version
                    print('Skipped already-installed distribution %s' % s)

            # convert wanted list to pip-style requirements
            args = ['%s==%s' % (dist.name, dist.version) for dist in wanted]

        # Now go build
        built = []
        for arg in args:
            wheel = build_wheel(arg, options)
            if wheel:
                built.append(wheel)
        if built:
            if options.destdir == os.getcwd():
                dest = ''
            else:
                dest = ' in %s' % options.destdir
            print('The following wheels were built%s:' % dest)
            for wheel in built:
                print(' %s' % wheel.filename)
def test_dependency_finder(self):
    """Exercise DependencyFinder: closure resolution, meta-extras,
    topological ordering of the result graph, and extras handling.
    """
    locator = AggregatingLocator(JSONLocator(),
                                 SimpleScrapingLocator(
                                     'https://pypi.python.org/simple/',
                                     timeout=3.0),
                                 scheme='legacy')
    finder = DependencyFinder(locator)
    # Basic closure: irc 5.0.1 plus its build/run dependencies.
    dists, problems = finder.find('irc (== 5.0.1)')
    self.assertFalse(problems)
    actual = sorted([d.name for d in dists])
    self.assertEqual(actual,
                     ['hgtools', 'irc', 'pytest-runner', 'setuptools_scm'])
    # The ':test:' meta-extra pulls in the test-time dependencies too.
    dists, problems = finder.find('irc (== 5.0.1)', meta_extras=[':test:'])
    self.assertFalse(problems)
    actual = sorted([d.name for d in dists])
    self.assertEqual(actual, [
        'hgtools', 'irc', 'py', 'pytest', 'pytest-runner', 'setuptools_scm'
    ])
    # Any valid topological order is acceptable; enumerate the known ones.
    g = make_graph(dists)
    slist, cycle = g.topological_sort()
    self.assertFalse(cycle)
    names = [d.name for d in slist]
    expected = set([
        ('setuptools_scm', 'hgtools', 'py', 'pytest-runner', 'pytest', 'irc'),
        ('setuptools_scm', 'hgtools', 'py', 'pytest', 'pytest-runner', 'irc'),
        ('setuptools_scm', 'py', 'hgtools', 'pytest-runner', 'pytest', 'irc'),
        ('hgtools', 'setuptools_scm', 'py', 'pytest', 'pytest-runner', 'irc'),
        ('py', 'hgtools', 'setuptools_scm', 'pytest', 'pytest-runner', 'irc'),
        ('hgtools', 'setuptools_scm', 'py', 'pytest-runner', 'pytest', 'irc'),
        ('py', 'hgtools', 'setuptools_scm', 'pytest-runner', 'pytest', 'irc'),
        ('py', 'setuptools_scm', 'hgtools', 'pytest', 'pytest-runner', 'irc'),
        ('pytest', 'setuptools_scm', 'hgtools', 'pytest-runner', 'irc'),
        ('hgtools', 'setuptools_scm', 'pytest', 'pytest-runner', 'irc'),
        ('py', 'setuptools_scm', 'hgtools', 'pytest-runner', 'pytest', 'irc'),
        ('py', 'setuptools_scm', 'pytest', 'pytest-runner', 'hgtools', 'irc'),
        ('py', 'setuptools_scm', 'pytest-runner', 'pytest', 'hgtools', 'irc'),
        ('py', 'setuptools_scm', 'pytest', 'hgtools', 'pytest-runner', 'irc'),
        ('setuptools_scm', 'py', 'pytest', 'hgtools', 'pytest-runner', 'irc'),
        ('setuptools_scm', 'py', 'pytest-runner', 'hgtools', 'pytest', 'irc'),
        ('py', 'setuptools_scm', 'pytest-runner', 'hgtools', 'pytest', 'irc'),
        ('setuptools_scm', 'py', 'pytest',
         'pytest-runner', 'hgtools', 'irc'),
    ])
    self.assertIn(tuple(names), expected)
    # Test with extras
    dists, problems = finder.find('Jinja2 (== 2.6)')
    self.assertFalse(problems)
    actual = sorted([d.name_and_version for d in dists])
    self.assertEqual(actual, ['Jinja2 (2.6)'])
    # Requesting the 'i18n' extra pulls in Babel and pytz as well.
    dists, problems = finder.find('Jinja2 [i18n] (== 2.6)')
    self.assertFalse(problems)
    actual = sorted([d.name_and_version for d in dists])
    self.assertEqual(actual[-2], 'Jinja2 (2.6)')
    self.assertTrue(actual[-1].startswith('pytz ('))
    self.assertTrue(actual[0].startswith('Babel ('))
    actual = [d.build_time_dependency for d in dists]
    self.assertEqual(actual, [False, False, False])
    # Now test with extra in dependency
    locator.clear_cache()
    dummy = make_dist('dummy', '0.1')
    dummy.metadata.run_requires = [{'requires': ['Jinja2 [i18n]']}]
    dists, problems = finder.find(dummy)
    self.assertFalse(problems)
    actual = sorted([d.name_and_version for d in dists])
    self.assertTrue(actual[0].startswith('Babel ('))
    # Without the extra, Babel should not be dragged in.
    locator.clear_cache()
    dummy.metadata.run_requires = [{'requires': ['Jinja2']}]
    dists, problems = finder.find(dummy)
    self.assertFalse(problems)
    actual = sorted([d.name_and_version for d in dists])
    self.assertTrue(actual[0].startswith('Jinja2 ('))