def test_pip_style(self):
    self.assertEqual(spec_from_line('foo>=1.0'), 'foo >=1.0')
    self.assertEqual(spec_from_line('foo >=1.0'), 'foo >=1.0')
    self.assertEqual(spec_from_line('FOO-Bar >=1.0'), 'foo-bar >=1.0')
    self.assertEqual(spec_from_line('foo >= 1.0'), 'foo >=1.0')
    self.assertEqual(spec_from_line('foo > 1.0'), 'foo >1.0')
    self.assertEqual(spec_from_line('foo != 1.0'), 'foo !=1.0')
    self.assertEqual(spec_from_line('foo != 1!1.0'), 'foo !=1!1.0')
    self.assertEqual(spec_from_line('foo <1.0'), 'foo <1.0')
    self.assertEqual(spec_from_line('foo >=1.0 , < 2.0'), 'foo >=1.0,<2.0')
def test_pip_style(self):
    self.assertEqual(spec_from_line("foo>=1.0"), "foo >=1.0")
    self.assertEqual(spec_from_line("foo >=1.0"), "foo >=1.0")
    self.assertEqual(spec_from_line("FOO-Bar >=1.0"), "foo-bar >=1.0")
    self.assertEqual(spec_from_line("foo >= 1.0"), "foo >=1.0")
    self.assertEqual(spec_from_line("foo > 1.0"), "foo >1.0")
    self.assertEqual(spec_from_line("foo != 1.0"), "foo !=1.0")
    self.assertEqual(spec_from_line("foo <1.0"), "foo <1.0")
    self.assertEqual(spec_from_line("foo >=1.0 , < 2.0"), "foo >=1.0,<2.0")
def test_pip_style(self):
    assert spec_from_line('foo>=1.0') == 'foo >=1.0' == self.cb_form('foo>=1.0')
    assert spec_from_line('foo >=1.0') == 'foo >=1.0' == self.cb_form('foo >=1.0')
    assert spec_from_line('FOO-Bar >=1.0') == 'foo-bar >=1.0' == self.cb_form('FOO-Bar >=1.0')
    assert spec_from_line('foo >= 1.0') == 'foo >=1.0' == self.cb_form('foo >= 1.0')
    assert spec_from_line('foo > 1.0') == 'foo >1.0' == self.cb_form('foo > 1.0')
    assert spec_from_line('foo != 1.0') == 'foo !=1.0' == self.cb_form('foo != 1.0')
    assert spec_from_line('foo <1.0') == 'foo <1.0' == self.cb_form('foo <1.0')
    assert spec_from_line('foo >=1.0 , < 2.0') == 'foo >=1.0,<2.0' == self.cb_form('foo >=1.0 , < 2.0')
def test_conda_style(self):
    self.assertEqual(spec_from_line('foo'), 'foo')
    self.assertEqual(spec_from_line('foo=1.0'), 'foo 1.0')
    self.assertEqual(spec_from_line('foo=1.0*'), 'foo 1.0*')
    self.assertEqual(spec_from_line('foo=1!1.0*'), 'foo 1!1.0*')
    self.assertEqual(spec_from_line('foo=1.0|1.2'), 'foo 1.0|1.2')
    self.assertEqual(spec_from_line('foo=1.0=2'), 'foo 1.0 2')
    self.assertEqual(spec_from_line('foo=1!1.0=2'), 'foo 1!1.0 2')
def test_pip_style(self):
    assert spec_from_line('foo>=1.0') == 'foo >=1.0' == self.cb_form(
        'foo>=1.0')
    assert spec_from_line('foo >=1.0') == 'foo >=1.0' == self.cb_form(
        'foo >=1.0')
    assert spec_from_line(
        'FOO-Bar >=1.0') == 'foo-bar >=1.0' == self.cb_form(
        'FOO-Bar >=1.0')
    assert spec_from_line('foo >= 1.0') == 'foo >=1.0' == self.cb_form(
        'foo >= 1.0')
    assert spec_from_line('foo > 1.0') == 'foo >1.0' == self.cb_form(
        'foo > 1.0')
    assert spec_from_line('foo != 1.0') == 'foo !=1.0' == self.cb_form(
        'foo != 1.0')
    assert spec_from_line('foo <1.0') == 'foo <1.0' == self.cb_form(
        'foo <1.0')
    assert spec_from_line(
        'foo >=1.0 , < 2.0') == 'foo >=1.0,<2.0' == self.cb_form(
        'foo >=1.0 , < 2.0')
def test_comment(self):
    assert spec_from_line('foo # comment') == 'foo' == self.cb_form('foo # comment')
    assert spec_from_line('foo ## comment') == 'foo' == self.cb_form('foo ## comment')
def test_conda_style(self):
    assert spec_from_line('foo') == 'foo' == self.cb_form('foo')
    assert spec_from_line('foo=1.0=2') == 'foo 1.0 2' == self.cb_form('foo=1.0=2')
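# The tests above and below exercise spec_from_line, which normalizes both
# pip-style ("foo >= 1.0") and conda-style ("foo=1.0=2") requirement lines.
# The following is a minimal, hypothetical re-implementation sketch written
# only to illustrate the mapping the tests expect; it is NOT the conda
# implementation, and the name _spec_from_line_sketch is made up here.
import re


def _spec_from_line_sketch(line):
    line = line.split('#')[0].strip()          # drop trailing comments
    m = re.match(r'^([A-Za-z0-9_.-]+)\s*(.*)$', line)
    if m is None:
        return None                            # e.g. '=' has no package name
    name, rest = m.group(1).lower(), m.group(2).strip()
    if not rest:
        return name                            # bare package name
    if rest.startswith('='):                   # conda style: =version[=build]
        return name + ' ' + ' '.join(p for p in rest[1:].split('=') if p)
    if rest[0] in '<>!':                       # pip style: comparison operators
        return name + ' ' + ','.join(c.strip().replace(' ', '')
                                     for c in rest.split(','))
    return None                                # e.g. 'foo 1.0' is rejected


# Example: _spec_from_line_sketch('FOO-Bar >= 1.0, < 2.0') == 'foo-bar >=1.0,<2.0'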
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # PyPI allows uppercase letters but conda does not, so we fix the
        # name here.
        d['package']['name'] = metadata.name.lower()
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']

        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [spec_from_line(i) for i in
                                     self.distribution.tests_require or []]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = metadata.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in
                                   entry_points.splitlines())
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                               str(err) + "\nThe string was" + newstr)
                else:
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, c.get(section, option))
                                     for option in c.options(section)]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                           entry_points)
            else:
                rs = entry_points.get('scripts', [])
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not rs and not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = rs + cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(map(unicode,
                                                         pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                d['test']['imports'] = ((self.distribution.packages or []) +
                                        (self.distribution.py_modules or []))
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and not
                isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()
        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)
        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(render.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
""" % render.bldpkg_path(m)
            print(no_upload_message)
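# Hedged usage sketch for the bdist_conda command implemented by run() above:
# conda-build documents installing it as distutils.command.bdist_conda, and a
# setup.py opts in through the CondaDistribution distclass. The conda_*
# keywords mirror the metadata attributes read at the top of run(); the
# concrete package name and values below are illustrative only.
from setuptools import setup
import distutils.command.bdist_conda

setup(
    name="foo",
    version="1.0",
    distclass=distutils.command.bdist_conda.CondaDistribution,
    conda_buildnum=1,
    conda_import_tests=True,
    conda_command_tests=True,
)

# Invoked with: python setup.py bdist_conda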
def test_invalid(self):
    assert spec_from_line('=') is None
    assert spec_from_line('foo 1.0') is None
def test_conda_style(self):
    assert spec_from_line('foo') == 'foo' == self.cb_form('foo')
    assert spec_from_line('foo=1.0=2') == 'foo 1.0 2' == self.cb_form(
        'foo=1.0=2')
def main(args, parser):
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    while args.packages:
        package = args.packages.pop()

        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']), url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0), package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                        data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ", err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value

                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)

                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x] for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]"
                                 % ','.join(extras))
                # ... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)

                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        # or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            # ... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep:  # ... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s" % dep)
                                deps.append(spec)

                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def main(args, parser):
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    while args.packages:
        package = args.packages.pop()
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']), url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'], package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                        data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ", err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value

                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)

                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                if pkginfo['install_requires'] or setuptools_build or setuptools_run:
                    if isinstance(pkginfo['install_requires'], string_types):
                        pkginfo['install_requires'] = [pkginfo['install_requires']]
                    deps = []
                    for dep in pkginfo['install_requires']:
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)

                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def test_comment(self):
    self.assertEqual(spec_from_line('foo # comment'), 'foo')
    self.assertEqual(spec_from_line('foo ## comment'), 'foo')
def test_conda_style(self):
    self.assertEqual(spec_from_line('foo'), 'foo')
    self.assertEqual(spec_from_line('foo=1.0'), 'foo 1.0')
    self.assertEqual(spec_from_line('foo=1.0*'), 'foo 1.0*')
    self.assertEqual(spec_from_line('foo=1.0|1.2'), 'foo 1.0|1.2')
    self.assertEqual(spec_from_line('foo=1.0=2'), 'foo 1.0 2')
def test_invalid(self):
    self.assertEqual(spec_from_line('='), None)
    self.assertEqual(spec_from_line('foo 1.0'), None)
def get_package_metadata(args, package, d, data):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    import yaml

    print("Downloading %s (use --no-download to skip this step)" % package)
    tempdir = mkdtemp('conda_skeleton_' + d['filename'])

    [output_dir] = args.output_dir

    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, d['filename'])
        if not isfile(download_path) or hashsum_file(download_path,
                                                     'md5') != d['md5']:
            download(d['pypiurl'], join(SRC_CACHE, d['filename']))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, d['filename']), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        run_setuppy(src_dir, tempdir, args)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)

        setuptools_build = pkginfo['setuptools']
        setuptools_run = False

        # Look at the entry_points and construct console_script and
        # gui_scripts entry_points for conda
        entry_points = pkginfo['entry_points']
        if entry_points:
            if isinstance(entry_points, str):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip()
                                   for x in entry_points.split('\n'))
                config = configparser.ConfigParser()
                entry_points = {}
                try:
                    config.readfp(StringIO(newstr))
                except Exception as err:
                    print("WARNING: entry-points not understood: ", err)
                    print("The string was", newstr)
                    entry_points = pkginfo['entry_points']
                else:
                    setuptools_run = True
                    for section in config.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, config.get(section, option))
                                     for option in config.options(section)]
                            entry_points[section] = value

            if not isinstance(entry_points, dict):
                print("WARNING: Could not add entry points. They were:")
                print(entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                if isinstance(cs, string_types):
                    cs = [cs]
                if isinstance(gs, string_types):
                    gs = [gs]
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                    setuptools_build = True
                    setuptools_run = True

                entry_list = (
                    cs
                    # TODO: Use pythonw for these
                    + gs)

                if len(cs + gs) != 0:
                    d['entry_points'] = INDENT.join([''] + entry_list)
                    d['entry_comment'] = ''
                    d['build_comment'] = ''
                    d['test_commands'] = INDENT.join([''] + make_entry_tests(entry_list))

        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        # Extract requested extra feature requirements...
        if args.all_extras:
            extras_require = list(pkginfo['extras_require'].values())
        else:
            try:
                extras_require = [pkginfo['extras_require'][x] for x in extras]
            except KeyError:
                sys.exit("Error: Invalid extra features: [%s]"
                         % ','.join(extras))
        # ... and collect all needed requirement specs in a single list:
        requires = []
        for specs in [pkginfo['install_requires']] + extras_require:
            if isinstance(specs, string_types):
                requires.append(specs)
            else:
                requires.extend(specs)

        if requires or setuptools_build or setuptools_run:
            deps = []
            if setuptools_run:
                deps.append('setuptools')
            for deptext in requires:
                # Every item may be a single requirement
                # or a multiline requirements string...
                for dep in deptext.split('\n'):
                    # ... and may also contain comments...
                    dep = dep.split('#')[0].strip()
                    if dep:  # ... and empty (or comment only) lines
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)

            if 'setuptools' in deps:
                setuptools_build = False
                setuptools_run = False
                d['egg_comment'] = ''
                d['build_comment'] = ''
            d['build_depends'] = INDENT.join([''] +
                                             ['setuptools'] * setuptools_build +
                                             deps)
            d['run_depends'] = INDENT.join([''] +
                                           ['setuptools'] * setuptools_run +
                                           deps)

            if args.recursive:
                for dep in deps:
                    dep = dep.split()[0]
                    if not exists(join(output_dir, dep)):
                        args.packages.append(dep)

        if 'packagename' not in d:
            d['packagename'] = pkginfo['name'].lower()
        if d['version'] == 'UNKNOWN':
            d['version'] = pkginfo['version']

        if pkginfo['packages']:
            deps = set(pkginfo['packages'])
            if d['import_tests']:
                if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                    olddeps = []
                else:
                    olddeps = [x for x in d['import_tests'].split()
                               if x != '-']
                deps = set(olddeps) | deps
            d['import_tests'] = INDENT.join(sorted(deps))
            d['import_comment'] = ''

        if pkginfo['homeurl'] is not None:
            d['homeurl'] = pkginfo['homeurl']
        else:
            if data:
                d['homeurl'] = data['homeurl']
            else:
                d['homeurl'] = "The package home page"
                d['home_comment'] = '#'

        if pkginfo['summary']:
            d['summary'] = repr(pkginfo['summary'])
        else:
            if data:
                d['summary'] = repr(data['summary'])
            else:
                d['summary'] = "Summary of the package"
                d['summary_comment'] = '#'

        license_classifier = "License :: OSI Approved :: "
        if pkginfo['classifiers']:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                        pkginfo['classifiers'] if classifier.startswith(license_classifier)]
        elif data and 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                        data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if pkginfo['license']:
                license = pkginfo['license']
            elif data and 'license' in data:
                license = data['license']
            else:
                license = None
            if license:
                if args.noprompt:
                    pass
                elif '\n' not in license:
                    print('Using "%s" for the license' % license)
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(license)
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI or in the source. What license should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license
    finally:
        rm_rf(tempdir)
def get_package_metadata(args, package, d, data):
    print("Downloading %s" % package)

    [output_dir] = args.output_dir

    pkginfo = get_pkginfo(package,
                          filename=d['filename'],
                          pypiurl=d['pypiurl'],
                          md5=d['md5'],
                          python_version=args.python_version)

    setuptools_build = pkginfo['setuptools']
    setuptools_run = False

    # Look at the entry_points and construct console_script and
    # gui_scripts entry_points for conda
    entry_points = pkginfo['entry_points']
    if entry_points:
        if isinstance(entry_points, str):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip()
                               for x in entry_points.split('\n'))
            config = configparser.ConfigParser()
            entry_points = {}
            try:
                config.readfp(StringIO(newstr))
            except Exception as err:
                print("WARNING: entry-points not understood: ", err)
                print("The string was", newstr)
                entry_points = pkginfo['entry_points']
            else:
                setuptools_run = True
                for section in config.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = ['%s=%s' % (option, config.get(section, option))
                                 for option in config.options(section)]
                        entry_points[section] = value

        if not isinstance(entry_points, dict):
            print("WARNING: Could not add entry points. They were:")
            print(entry_points)
        else:
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            if isinstance(cs, string_types):
                cs = [cs]
            if isinstance(gs, string_types):
                gs = [gs]
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                setuptools_build = True
                setuptools_run = True

            entry_list = (
                cs
                # TODO: Use pythonw for these
                + gs)

            if len(cs + gs) != 0:
                d['entry_points'] = INDENT.join([''] + entry_list)
                d['entry_comment'] = ''
                d['build_comment'] = ''
                d['test_commands'] = INDENT.join([''] + make_entry_tests(entry_list))

    requires = get_requirements(package, pkginfo, all_extras=args.all_extras)

    if requires or setuptools_build or setuptools_run:
        deps = []
        if setuptools_run:
            deps.append('setuptools')
        for deptext in requires:
            if isinstance(deptext, string_types):
                deptext = deptext.split('\n')
            # Every item may be a single requirement
            # or a multiline requirements string...
            for dep in deptext:
                # ... and may also contain comments...
                dep = dep.split('#')[0].strip()
                if dep:  # ... and empty (or comment only) lines
                    spec = spec_from_line(dep)
                    if spec is None:
                        sys.exit("Error: Could not parse: %s" % dep)
                    deps.append(spec)

        if 'setuptools' in deps:
            setuptools_build = False
            setuptools_run = False
            d['egg_comment'] = ''
            d['build_comment'] = ''
        d['build_depends'] = INDENT.join([''] +
                                         ['setuptools'] * setuptools_build +
                                         deps)
        d['run_depends'] = INDENT.join([''] +
                                       ['setuptools'] * setuptools_run +
                                       deps)

        if args.recursive:
            for dep in deps:
                dep = dep.split()[0]
                if not exists(join(output_dir, dep)):
                    if dep not in args.created_recipes:
                        args.packages.append(dep)

    if d['build_comment'] == '':
        if args.noarch_python:
            d['noarch_python_comment'] = ''

    if 'packagename' not in d:
        d['packagename'] = pkginfo['name'].lower()
    if d['version'] == 'UNKNOWN':
        d['version'] = pkginfo['version']

    if pkginfo['packages']:
        deps = set(pkginfo['packages'])
        if d['import_tests']:
            if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                olddeps = []
            else:
                olddeps = [x for x in d['import_tests'].split()
                           if x != '-']
            deps = set(olddeps) | deps
        d['import_tests'] = INDENT.join(sorted(deps))
        d['import_comment'] = ''

    d['tests_require'] = INDENT.join(sorted([spec_from_line(pkg) for pkg
                                             in pkginfo['tests_require']]))

    if pkginfo['homeurl'] is not None:
        d['homeurl'] = pkginfo['homeurl']
    else:
        if data and 'homeurl' in data:
            d['homeurl'] = data['homeurl']
        else:
            d['homeurl'] = "The package home page"
            d['home_comment'] = '#'

    if pkginfo['summary']:
        d['summary'] = repr(pkginfo['summary'])
    else:
        if data:
            d['summary'] = repr(data['summary'])
        else:
            d['summary'] = "Summary of the package"
            d['summary_comment'] = '#'
    if d['summary'].startswith("u'"):
        d['summary'] = d['summary'][1:]

    license_classifier = "License :: OSI Approved :: "
    if pkginfo['classifiers']:
        licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    pkginfo['classifiers'] if classifier.startswith(license_classifier)]
    elif data and 'classifiers' in data:
        licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
    else:
        licenses = []
    if not licenses:
        if pkginfo['license']:
            license = pkginfo['license']
        elif data and 'license' in data:
            license = data['license']
        else:
            license = None
        if license:
            if args.noprompt:
                pass
            elif '\n' not in license:
                print('Using "%s" for the license' % license)
            else:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(license)
                print()
                license = input("What license string should I use? ")
        else:
            if args.noprompt:
                license = "UNKNOWN"
            else:
                license = input(("No license could be found for %s on " +
                                 "PyPI or in the source. What license should I use? ") % package)
    else:
        license = ' or '.join(licenses)
    d['license'] = license
def test_invalid(self):
    self.assertEqual(spec_from_line("="), None)
    self.assertEqual(spec_from_line("foo 1.0"), None)
def test_conda_style(self):
    self.assertEqual(spec_from_line("foo"), "foo")
    self.assertEqual(spec_from_line("foo=1.0"), "foo 1.0")
    self.assertEqual(spec_from_line("foo=1.0*"), "foo 1.0*")
    self.assertEqual(spec_from_line("foo=1.0|1.2"), "foo 1.0|1.2")
    self.assertEqual(spec_from_line("foo=1.0=2"), "foo 1.0 2")
def get_package_metadata(args, package, d, data):
    print("Downloading %s" % package)

    [output_dir] = args.output_dir

    pkginfo = get_pkginfo(package,
                          filename=d['filename'],
                          pypiurl=d['pypiurl'],
                          md5=d['md5'],
                          python_version=args.python_version)

    setuptools_build = pkginfo['setuptools']
    setuptools_run = False

    # Look at the entry_points and construct console_script and
    # gui_scripts entry_points for conda
    entry_points = pkginfo['entry_points']
    if entry_points:
        if isinstance(entry_points, str):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip()
                               for x in entry_points.splitlines())
            config = configparser.ConfigParser()
            entry_points = {}
            try:
                config.readfp(StringIO(newstr))
            except Exception as err:
                print("WARNING: entry-points not understood: ", err)
                print("The string was", newstr)
                entry_points = pkginfo['entry_points']
            else:
                setuptools_run = True
                for section in config.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = [
                            '%s=%s' % (option, config.get(section, option))
                            for option in config.options(section)
                        ]
                        entry_points[section] = value

        if not isinstance(entry_points, dict):
            print("WARNING: Could not add entry points. They were:")
            print(entry_points)
        else:
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            if isinstance(cs, string_types):
                cs = [cs]
            if isinstance(gs, string_types):
                gs = [gs]
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                setuptools_build = True
                setuptools_run = True

            # TODO: Use pythonw for gui scripts
            entry_list = (cs + gs)

            if len(cs + gs) != 0:
                d['entry_points'] = INDENT.join([''] + entry_list)
                d['entry_comment'] = ''
                d['build_comment'] = ''
                d['test_commands'] = INDENT.join([''] + make_entry_tests(entry_list))

    requires = get_requirements(package, pkginfo, all_extras=args.all_extras)

    if requires or setuptools_build or setuptools_run:
        deps = []
        if setuptools_run:
            deps.append('setuptools')
        for deptext in requires:
            if isinstance(deptext, string_types):
                deptext = deptext.splitlines()
            # Every item may be a single requirement
            # or a multiline requirements string...
            for dep in deptext:
                # ... and may also contain comments...
                dep = dep.split('#')[0].strip()
                if dep:  # ... and empty (or comment only) lines
                    spec = spec_from_line(dep)
                    if spec is None:
                        sys.exit("Error: Could not parse: %s" % dep)
                    deps.append(spec)

        if 'setuptools' in deps:
            setuptools_build = False
            setuptools_run = False
            d['egg_comment'] = ''
            d['build_comment'] = ''
        d['build_depends'] = INDENT.join([''] +
                                         ['setuptools'] * setuptools_build +
                                         deps)
        d['run_depends'] = INDENT.join([''] +
                                       ['setuptools'] * setuptools_run +
                                       deps)

        if args.recursive:
            for dep in deps:
                dep = dep.split()[0]
                if not exists(join(output_dir, dep)):
                    if dep not in args.created_recipes:
                        args.packages.append(dep)

    if args.noarch_python:
        d['build_comment'] = ''
        d['noarch_python_comment'] = ''

    if 'packagename' not in d:
        d['packagename'] = pkginfo['name'].lower()
    if d['version'] == 'UNKNOWN':
        d['version'] = pkginfo['version']

    if pkginfo['packages']:
        deps = set(pkginfo['packages'])
        if d['import_tests']:
            if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                olddeps = []
            else:
                olddeps = [x for x in d['import_tests'].split()
                           if x != '-']
            deps = set(olddeps) | deps
        d['import_tests'] = INDENT.join(sorted(deps))
        d['import_comment'] = ''

    d['tests_require'] = INDENT.join(
        sorted([spec_from_line(pkg) for pkg in pkginfo['tests_require']]))

    if pkginfo['homeurl'] is not None:
        d['homeurl'] = pkginfo['homeurl']
    else:
        if data and 'homeurl' in data:
            d['homeurl'] = data['homeurl']
        else:
            d['homeurl'] = "The package home page"
            d['home_comment'] = '#'

    if pkginfo['summary']:
        d['summary'] = repr(pkginfo['summary'])
    else:
        if data:
            d['summary'] = repr(data['summary'])
        else:
            d['summary'] = "Summary of the package"
            d['summary_comment'] = '#'
    if d['summary'].startswith("u'") or d['summary'].startswith('u"'):
        d['summary'] = d['summary'][1:]

    license_classifier = "License :: OSI Approved :: "
    if pkginfo['classifiers']:
        licenses = [
            classifier.split(license_classifier, 1)[1]
            for classifier in pkginfo['classifiers']
            if classifier.startswith(license_classifier)
        ]
    elif data and 'classifiers' in data:
        licenses = [
            classifier.split(license_classifier, 1)[1]
            for classifier in data['classifiers']
            if classifier.startswith(license_classifier)
        ]
    else:
        licenses = []
    if not licenses:
        if pkginfo['license']:
            license = pkginfo['license']
        elif data and 'license' in data:
            license = data['license']
        else:
            license = None
        if license:
            if args.noprompt:
                pass
            elif '\n' not in license:
                print('Using "%s" for the license' % license)
            else:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(license)
                print()
                license = input("What license string should I use? ")
        else:
            if args.noprompt:
                license = "UNKNOWN"
            else:
                license = input(
                    ("No license could be found for %s on " +
                     "PyPI or in the source. What license should I use? ") % package)
    else:
        license = ' or '.join(licenses)
    d['license'] = license
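# Small illustration of the INDENT.join([''] + deps) pattern used throughout
# the skeleton code above: assuming INDENT is a newline plus a four-space
# indented "- " bullet (the exact value is an assumption here), joining an
# empty string with the specs yields a newline-prefixed YAML list that can be
# substituted directly into the meta.yaml template.
INDENT = '\n    - '
deps = ['setuptools', 'numpy', 'requests >=2.0']
print('requirements:\n  run:' + INDENT.join([''] + deps))
# requirements:
#   run:
#     - setuptools
#     - numpy
#     - requests >=2.0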
def test_comment(self):
    assert spec_from_line('foo # comment') == 'foo' == self.cb_form(
        'foo # comment')
    assert spec_from_line('foo ## comment') == 'foo' == self.cb_form(
        'foo ## comment')
def run(self):
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    with Locked(config.croot):
        d = defaultdict(dict)
        # Needs to be lowercase
        d['package']['name'] = metadata.name.lower()
        d['package']['version'] = metadata.version
        d['build']['number'] = metadata.conda_buildnum

        # MetaData does the auto stuff if the build string is None
        d['build']['string'] = metadata.conda_buildstr

        d['build']['binary_relocation'] = metadata.conda_binary_relocation
        d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
        d['build']['features'] = metadata.conda_features
        d['build']['track_features'] = metadata.conda_track_features

        # XXX: I'm not really sure if it is correct to combine requires
        # and install_requires
        d['requirements']['run'] = d['requirements']['build'] = \
            [spec_from_line(i) for i in
             (metadata.requires or []) +
             (getattr(self.distribution, 'install_requires', []) or [])] + \
            ['python']

        if hasattr(self.distribution, 'tests_require'):
            # A lot of packages use extras_require['test'], but
            # tests_require is the one that is officially supported by
            # setuptools.
            d['test']['requires'] = [
                spec_from_line(i) for i in
                self.distribution.tests_require or []
            ]

        d['about']['home'] = metadata.url
        # Don't worry about classifiers. This isn't skeleton pypi. We
        # don't need to make this work with random stuff in the wild. If
        # someone writes their setup.py wrong and this doesn't work, it's
        # their fault.
        d['about']['license'] = metadata.license
        d['about']['summary'] = self.distribution.description

        # This is similar logic from conda skeleton pypi
        entry_points = getattr(self.distribution, 'entry_points', [])
        if entry_points:
            if isinstance(entry_points, string_types):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in
                                   entry_points.split('\n'))
                c = configparser.ConfigParser()
                entry_points = {}
                try:
                    c.readfp(StringIO(newstr))
                except Exception as err:
                    # This seems to be the best error here
                    raise DistutilsGetoptError(
                        "ERROR: entry-points not understood: " +
                        str(err) + "\nThe string was" + newstr)
                else:
                    for section in c.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = [
                                '%s=%s' % (option, c.get(section, option))
                                for option in c.options(section)
                            ]
                            entry_points[section] = value
                        else:
                            # Make sure setuptools is added as a dependency below
                            entry_points[section] = None

            if not isinstance(entry_points, dict):
                raise DistutilsGetoptError(
                    "ERROR: Could not add entry points. They were:\n" +
                    entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if not cs and not gs and len(entry_points) > 1:
                    d['requirements']['run'].append('setuptools')
                    d['requirements']['build'].append('setuptools')
                entry_list = cs + gs
                if gs and conda.config.platform == 'osx':
                    d['build']['osx_is_app'] = True
                if len(cs + gs) != 0:
                    d['build']['entry_points'] = entry_list
                    if metadata.conda_command_tests is True:
                        d['test']['commands'] = list(
                            map(unicode, pypi.make_entry_tests(entry_list)))

        if 'setuptools' in d['requirements']['run']:
            d['build']['preserve_egg_dir'] = True

        if metadata.conda_import_tests:
            if metadata.conda_import_tests is True:
                d['test']['imports'] = ((self.distribution.packages or []) +
                                        (self.distribution.py_modules or []))
            else:
                d['test']['imports'] = metadata.conda_import_tests

        if (metadata.conda_command_tests and not
                isinstance(metadata.conda_command_tests, bool)):
            d['test']['commands'] = list(
                map(unicode, metadata.conda_command_tests))

        d = dict(d)
        m = MetaData.fromdict(d)
        # Shouldn't fail, but do you really trust the code above?
        m.check_fields()
        build.build(m, post=False)
        # Do the install
        if not PY3:
            # Command is an old-style class in Python 2
            install.run(self)
        else:
            super().run()
        build.build(m, post=True)
        build.test(m)
        if self.binstar_upload:
            class args:
                binstar_upload = self.binstar_upload
            handle_binstar_upload(build.bldpkg_path(m), args)
        else:
            no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
            print(no_upload_message)
def execute(args, parser):
    if not os.path.exists(args.file):
        msg = 'Unable to locate environment file: %s\n\n' % args.file
        msg += "\n".join(textwrap.wrap(textwrap.dedent("""
            Please verify that the above file is present and that you have
            permission to read the file's contents. Note, you can specify the
            file to use by explicitly adding --file=/path/to/file when calling
            conda env create.""").lstrip()))

        common.error_and_exit(msg, json=args.json)

    with open(args.file, 'rb') as fp:
        data = yaml.load(fp)

    if not args.name:
        if 'name' not in data:
            # TODO It would be nice to be able to format this more cleanly
            common.error_and_exit(
                'An environment name is required.\n\n'
                'You can either specify one directly with --name or you can add\n'
                'a name property to your %s file.' % args.file,
                json=args.json
            )
        # Note: stubbing out the args object as all of the
        # conda.cli.common code thinks that name will always
        # be specified.
        args.name = data['name']

    prefix = common.get_prefix(args, search=False)
    cli_install.check_prefix(prefix, json=args.json)

    # TODO, add capability
    # common.ensure_override_channels_requires_channel(args)
    # channel_urls = args.channel or ()

    specs = OrderedDict([('conda', [])])

    for line in data['dependencies']:
        if type(line) is dict:
            specs.update(line)
        else:
            specs['conda'].append(common.spec_from_line(line))

    for installer_type, specs in specs.items():
        try:
            installer = get_installer(installer_type)
            installer.install(prefix, specs, args, data)
        except InvalidInstaller:
            sys.stderr.write(textwrap.dedent("""
                Unable to install package for {0}.

                Please double check and ensure your dependencies file has
                the correct spelling. You might also try installing the
                conda-env-{0} package to see if it provides the required
                installer.
                """).lstrip().format(installer_type)
            )
            return -1

    touch_nonadmin(prefix)
    if not args.json:
        cli_install.print_activate(args.name if args.name else prefix)
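# A minimal sketch (not the conda-env source) of the dependency-grouping loop
# in execute() above: plain strings become conda specs, while nested mappings
# such as {'pip': [...]} select a different installer. group_dependencies and
# its normalize argument are hypothetical stand-ins for the inline loop and
# common.spec_from_line.
from collections import OrderedDict


def group_dependencies(dependencies, normalize=lambda line: line):
    specs = OrderedDict([('conda', [])])
    for line in dependencies:
        if isinstance(line, dict):
            specs.update(line)                      # e.g. {'pip': ['flake8']}
        else:
            specs['conda'].append(normalize(line))  # conda spec line
    return specs


# Example:
# group_dependencies(['numpy=1.8', {'pip': ['flake8']}])
# -> OrderedDict([('conda', ['numpy=1.8']), ('pip', ['flake8'])])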