def get_download_data(args, client, package, version, is_url):
    data = client.release_data(package, version) if not is_url else None
    urls = client.release_urls(package, version) if not is_url else [package]
    if not is_url and not args.all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['python_version'] == 'source']
    if not urls:
        if 'download_url' in data:
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" % (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            if fragment.startswith('md5='):
                md5 = fragment[len('md5='):]
            else:
                md5 = ''
        else:
            sys.exit("Error: No source urls found for %s" % package)
    if len(urls) > 1 and not args.noprompt:
        print("More than one source version is available for %s:" % package)
        if args.manual_url:
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            print("Using the one with the smallest source size.")
            print("Use --manual-url to override this behavior.")
            min_size, n = min((url['size'], i) for (i, url) in enumerate(urls))
    else:
        n = 0

    if not is_url:
        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0),
                                             package))
        pypiurl = urls[n]['url']
        md5 = urls[n]['md5_digest']
        filename = urls[n]['filename'] or 'package'
    else:
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        if U.fragment and U.fragment.startswith('md5='):
            md5 = U.fragment[len('md5='):]
        else:
            md5 = ''
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, md5)
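# A minimal sketch of the url/fragment handling above, assuming `parse_url`
# is urllib3's (conda-build imports it from requests' vendored urllib3); the
# url and hash below are illustrative only.
from urllib3.util.url import parse_url

u = parse_url("https://example.invalid/foo-1.0.tar.gz"
              "#md5=0123456789abcdef0123456789abcdef")
filename = u.path.rsplit('/', 1)[-1]   # 'foo-1.0.tar.gz'
fragment = u.fragment or ''
md5 = fragment[len('md5='):] if fragment.startswith('md5=') else ''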
def get_userandpass(proxytype='', realm=''):
    """Get a username and password from the terminal.

    Can be replaced with anything else, e.g. some GUI prompt."""
    import getpass

    uname = input(proxytype + ' proxy username:')
    pword = getpass.getpass()
    return uname, pword
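# Hypothetical interactive session for the helper above (values illustrative;
# getpass.getpass() reads the password without echoing it):
#
# >>> get_userandpass('http')
# http proxy username:alice
# Password:
# ('alice', 's3cret')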
def main(args, parser):
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
                                     {'packagename': package.lower(),
                                      'run_depends': '',
                                      'build_depends': '',
                                      'entry_points': '',
                                      'build_comment': '# ',
                                      'test_commands': '',
                                      'usemd5': '',
                                      'entry_comment': '#',
                                      'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s"
                         % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s"
                      % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:"
                  % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'],
                                             package))
        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        license_classifier = "License :: OSI Approved :: "
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on "
                                    "PyPI. What license should I use? "
                                    % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)"
                  % package)
            tempdir = mkdtemp('conda_skeleton')
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or \
                        hashsum_file(download_path, 'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                with open(join(tempdir, 'pkginfo.yaml')) as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = 'setuptools' in sys.modules
                setuptools_run = False
                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip() for x in
                                           entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ", err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value

                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (cs
                                      # TODO: Use pythonw for these
                                      + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] +
                                                             make_entry_tests(entry_list))

                if pkginfo['install_requires'] or setuptools_build or setuptools_run:
                    deps = [remove_version_information(dep).lower()
                            for dep in pkginfo['install_requires']]
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + list(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def get_proxy_username_and_pass(scheme):
    import getpass

    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def get_package_metadata(args, package, d, data):
    print("Downloading %s" % package)
    [output_dir] = args.output_dir
    pkginfo = get_pkginfo(package,
                          filename=d['filename'],
                          pypiurl=d['pypiurl'],
                          md5=d['md5'],
                          python_version=args.python_version)

    setuptools_build = pkginfo['setuptools']
    setuptools_run = False
    # Look at the entry_points and construct console_script and
    # gui_scripts entry_points for conda
    entry_points = pkginfo['entry_points']
    if entry_points:
        if isinstance(entry_points, str):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip() for x in entry_points.splitlines())
            config = configparser.ConfigParser()
            entry_points = {}
            try:
                config.readfp(StringIO(newstr))
            except Exception as err:
                print("WARNING: entry-points not understood: ", err)
                print("The string was", newstr)
                entry_points = pkginfo['entry_points']
            else:
                setuptools_run = True
                for section in config.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = ['%s=%s' % (option, config.get(section, option))
                                 for option in config.options(section)]
                        entry_points[section] = value

        if not isinstance(entry_points, dict):
            print("WARNING: Could not add entry points. They were:")
            print(entry_points)
        else:
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            if isinstance(cs, string_types):
                cs = [cs]
            if isinstance(gs, string_types):
                gs = [gs]
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                setuptools_build = True
                setuptools_run = True
            # TODO: Use pythonw for gui scripts
            entry_list = cs + gs
            if len(cs + gs) != 0:
                d['entry_points'] = INDENT.join([''] + entry_list)
                d['entry_comment'] = ''
                d['build_comment'] = ''
                d['test_commands'] = INDENT.join([''] +
                                                 make_entry_tests(entry_list))

    requires = get_requirements(package, pkginfo, all_extras=args.all_extras)

    if requires or setuptools_build or setuptools_run:
        deps = []
        if setuptools_run:
            deps.append('setuptools')
        for deptext in requires:
            if isinstance(deptext, string_types):
                deptext = deptext.splitlines()
            # Every item may be a single requirement
            # or a multiline requirements string...
            for dep in deptext:
                # ... and may also contain comments...
                dep = dep.split('#')[0].strip()
                if dep:  # ... and empty (or comment only) lines
                    spec = spec_from_line(dep)
                    if spec is None:
                        sys.exit("Error: Could not parse: %s" % dep)
                    deps.append(spec)

        if 'setuptools' in deps:
            setuptools_build = False
            setuptools_run = False
            d['egg_comment'] = ''
            d['build_comment'] = ''
        d['build_depends'] = INDENT.join([''] +
                                         ['setuptools'] * setuptools_build +
                                         deps)
        d['run_depends'] = INDENT.join([''] +
                                       ['setuptools'] * setuptools_run +
                                       deps)

        if args.recursive:
            for dep in deps:
                dep = dep.split()[0]
                if not exists(join(output_dir, dep)):
                    if dep not in args.created_recipes:
                        args.packages.append(dep)

    if args.noarch_python:
        d['build_comment'] = ''
        d['noarch_python_comment'] = ''

    if 'packagename' not in d:
        d['packagename'] = pkginfo['name'].lower()
    if d['version'] == 'UNKNOWN':
        d['version'] = pkginfo['version']

    if pkginfo['packages']:
        deps = set(pkginfo['packages'])
        if d['import_tests']:
            if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                olddeps = []
            else:
                olddeps = [x for x in d['import_tests'].split()
                           if x != '-']
            deps = set(olddeps) | deps
        d['import_tests'] = INDENT.join(sorted(deps))
        d['import_comment'] = ''

        d['tests_require'] = INDENT.join(sorted([spec_from_line(pkg) for pkg
                                                 in pkginfo['tests_require']]))

    if pkginfo['homeurl'] is not None:
        d['homeurl'] = pkginfo['homeurl']
    else:
        if data and 'homeurl' in data:
            d['homeurl'] = data['homeurl']
        else:
            d['homeurl'] = "The package home page"
            d['home_comment'] = '#'

    if pkginfo['summary']:
        d['summary'] = repr(pkginfo['summary'])
    else:
        if data:
            d['summary'] = repr(data['summary'])
        else:
            d['summary'] = "Summary of the package"
            d['summary_comment'] = '#'
    if d['summary'].startswith("u'") or d['summary'].startswith('u"'):
        d['summary'] = d['summary'][1:]

    license_classifier = "License :: OSI Approved :: "
    if pkginfo['classifiers']:
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in pkginfo['classifiers']
                    if classifier.startswith(license_classifier)]
    elif data and 'classifiers' in data:
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
    else:
        licenses = []
    if not licenses:
        if pkginfo['license']:
            license = pkginfo['license']
        elif data and 'license' in data:
            license = data['license']
        else:
            license = None
        if license:
            if args.noprompt:
                pass
            elif '\n' not in license:
                print('Using "%s" for the license' % license)
            else:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(license)
                print()
                license = input("What license string should I use? ")
        else:
            if args.noprompt:
                license = "UNKNOWN"
            else:
                license = input(("No license could be found for %s on "
                                 "PyPI or in the source. What license "
                                 "should I use? ") % package)
    else:
        license = ' or '.join(licenses)
    d['license'] = license
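# A standalone sketch of the requirement-cleaning loop above. spec_from_line
# is conda's spec parser; a stub stands in for it here, so the stub's name and
# behavior are assumptions for illustration only.
def stub_spec_from_line(line):
    return line.lower() or None

example_requires = ["numpy >=1.7  # needed for arrays\n\nsix", ["requests"]]
cleaned = []
for deptext in example_requires:
    if isinstance(deptext, str):
        deptext = deptext.splitlines()
    for dep in deptext:
        dep = dep.split('#')[0].strip()   # drop trailing comments
        if dep:                           # skip empty / comment-only lines
            cleaned.append(stub_spec_from_line(dep))
print(cleaned)   # ['numpy >=1.7', 'six', 'requests']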
def get_package_metadata(args, package, d, data):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    import yaml

    print("Downloading %s (use --no-download to skip this step)" % package)
    tempdir = mkdtemp('conda_skeleton_' + d['filename'])
    [output_dir] = args.output_dir
    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, d['filename'])
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != d['md5']:
            download(d['pypiurl'], join(SRC_CACHE, d['filename']))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, d['filename']), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        run_setuppy(src_dir, tempdir, args)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)

        setuptools_build = pkginfo['setuptools']
        setuptools_run = False
        # Look at the entry_points and construct console_script and
        # gui_scripts entry_points for conda
        entry_points = pkginfo['entry_points']
        if entry_points:
            if isinstance(entry_points, str):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip() for x in
                                   entry_points.split('\n'))
                config = configparser.ConfigParser()
                entry_points = {}
                try:
                    config.readfp(StringIO(newstr))
                except Exception as err:
                    print("WARNING: entry-points not understood: ", err)
                    print("The string was", newstr)
                    entry_points = pkginfo['entry_points']
                else:
                    setuptools_run = True
                    for section in config.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option, config.get(section, option))
                                     for option in config.options(section)]
                            entry_points[section] = value

            if not isinstance(entry_points, dict):
                print("WARNING: Could not add entry points. They were:")
                print(entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                if isinstance(cs, string_types):
                    cs = [cs]
                if isinstance(gs, string_types):
                    gs = [gs]
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                    setuptools_build = True
                    setuptools_run = True
                entry_list = (cs
                              # TODO: Use pythonw for these
                              + gs)
                if len(cs + gs) != 0:
                    d['entry_points'] = INDENT.join([''] + entry_list)
                    d['entry_comment'] = ''
                    d['build_comment'] = ''
                    d['test_commands'] = INDENT.join([''] +
                                                     make_entry_tests(entry_list))

        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        # Extract requested extra feature requirements...
        if args.all_extras:
            extras_require = list(pkginfo['extras_require'].values())
        else:
            try:
                extras_require = [pkginfo['extras_require'][x] for x in extras]
            except KeyError:
                sys.exit("Error: Invalid extra features: [%s]"
                         % ','.join(extras))

        # ... and collect all needed requirement specs in a single list:
        requires = []
        for specs in [pkginfo['install_requires']] + extras_require:
            if isinstance(specs, string_types):
                requires.append(specs)
            else:
                requires.extend(specs)

        if requires or setuptools_build or setuptools_run:
            deps = []
            if setuptools_run:
                deps.append('setuptools')
            for deptext in requires:
                # Every item may be a single requirement
                # or a multiline requirements string...
                for dep in deptext.split('\n'):
                    # ... and may also contain comments...
                    dep = dep.split('#')[0].strip()
                    if dep:  # ... and empty (or comment only) lines
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)

            if 'setuptools' in deps:
                setuptools_build = False
                setuptools_run = False
                d['egg_comment'] = ''
                d['build_comment'] = ''
            d['build_depends'] = INDENT.join([''] +
                                             ['setuptools'] * setuptools_build +
                                             deps)
            d['run_depends'] = INDENT.join([''] +
                                           ['setuptools'] * setuptools_run +
                                           deps)

            if args.recursive:
                for dep in deps:
                    dep = dep.split()[0]
                    if not exists(join(output_dir, dep)):
                        args.packages.append(dep)

        if 'packagename' not in d:
            d['packagename'] = pkginfo['name'].lower()
        if d['version'] == 'UNKNOWN':
            d['version'] = pkginfo['version']

        if pkginfo['packages']:
            deps = set(pkginfo['packages'])
            if d['import_tests']:
                if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                    olddeps = []
                else:
                    olddeps = [x for x in d['import_tests'].split()
                               if x != '-']
                deps = set(olddeps) | deps
            d['import_tests'] = INDENT.join(sorted(deps))
            d['import_comment'] = ''

        if pkginfo['homeurl'] is not None:
            d['homeurl'] = pkginfo['homeurl']
        else:
            if data:
                d['homeurl'] = data['homeurl']
            else:
                d['homeurl'] = "The package home page"
                d['home_comment'] = '#'

        if pkginfo['summary']:
            d['summary'] = repr(pkginfo['summary'])
        else:
            if data:
                d['summary'] = repr(data['summary'])
            else:
                d['summary'] = "Summary of the package"
                d['summary_comment'] = '#'

        license_classifier = "License :: OSI Approved :: "
        if pkginfo['classifiers']:
            licenses = [classifier.split(license_classifier, 1)[1]
                        for classifier in pkginfo['classifiers']
                        if classifier.startswith(license_classifier)]
        elif data and 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1]
                        for classifier in data['classifiers']
                        if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if pkginfo['license']:
                license = pkginfo['license']
            elif data and 'license' in data:
                license = data['license']
            else:
                license = None
            if license:
                if args.noprompt:
                    pass
                elif '\n' not in license:
                    print('Using "%s" for the license' % license)
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(license)
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on "
                                     "PyPI or in the source. What license "
                                     "should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license
    finally:
        rm_rf(tempdir)
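# The package[extra,...] matching above, shown in isolation:
import re

m = re.match(r'^([^[]+)\[([^]]+)\]$', 'requests[security,socks]')
if m:
    name, extras = m.groups()
    print(name, extras.split(','))   # requests ['security', 'socks']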
def main(args, parser):
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    while args.packages:
        [output_dir] = args.output_dir
        package = args.packages.pop()

        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path):
                raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
                                     {
                                         'packagename': package.lower(),
                                         'run_depends': '',
                                         'build_depends': '',
                                         'entry_points': '',
                                         'build_comment': '# ',
                                         'test_commands': '',
                                         'usemd5': '',
                                         'test_comment': '',
                                         'entry_comment': '# ',
                                         'egg_comment': '# ',
                                         'summary_comment': '',
                                         'home_comment': '',
                                     })
        if is_url:
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            if args.version:
                [version] = args.version
                versions = client.package_releases(package, True)
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                versions = client.package_releases(package)
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        print("%s not found, trying %s" % (package, package.capitalize()))
                        args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                        del package_dicts[package]
                        continue
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" % package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[0])
                    print("Use --version to specify a different version.")
                d['version'] = versions[0]

        data = client.release_data(package, d['version']) if not is_url else None
        urls = client.release_urls(package, d['version']) if not is_url else [package]
        if not is_url and not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                U = parse_url(urls[0]['url'])
                urls[0]['filename'] = U.path.rsplit('/')[-1]
                fragment = U.fragment or ''
                if fragment.startswith('md5='):
                    d['usemd5'] = ''
                    d['md5'] = fragment[len('md5='):]
                else:
                    d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        if not is_url:
            print("Using url %s (%s) for %s."
                  % (urls[n]['url'], human_bytes(urls[n]['size'] or 0), package))
            d['pypiurl'] = urls[n]['url']
            d['md5'] = urls[n]['md5_digest']
            d['filename'] = urls[n]['filename']
        else:
            print("Using url %s" % package)
            d['pypiurl'] = package
            U = parse_url(package)
            if U.fragment and U.fragment.startswith('md5='):
                d['usemd5'] = ''
                d['md5'] = U.fragment[len('md5='):]
            else:
                d['usemd5'] = '#'
                d['md5'] = ''
            # TODO: 'package' won't work with unpack()
            d['filename'] = U.path.rsplit('/', 1)[-1] or 'package'

        if is_url:
            d['import_tests'] = 'PLACEHOLDER'
        else:
            d['import_tests'] = valid(package).lower()
        get_package_metadata(args, package, d, data)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
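# Why defaultdict(str) is used for the download_url fallback above: fields the
# xmlrpc index would normally provide ('size', 'comment_text', 'md5_digest')
# are then missing, and defaultdict renders them as '' instead of raising
# KeyError. The url below is illustrative.
from collections import defaultdict

url = defaultdict(str, {'url': 'https://example.invalid/pkg-1.0.tar.gz'})
print(repr(url['md5_digest']))   # '' -- no KeyError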
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    if not args.dry_run:
        A = input("Warning: this branch does not work. Are you sure you want "
                  "to continue [y/N]? ")
        if not A or A not in 'yY':
            sys.exit("Bailing")
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix)
            return
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.install_local_packages(prefix, args.packages,
                                        verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages

            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n    anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

                if not find_executable('anaconda', include_others=False):
                    error_message += '\n\nYou may need to install the anaconda-client command line client with'
                    error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(
                    prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages

        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index,
                             show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def get_package_metadata(args, package, d, data):
    print("Downloading %s" % package)
    [output_dir] = args.output_dir
    pkginfo = get_pkginfo(package,
                          filename=d['filename'],
                          pypiurl=d['pypiurl'],
                          md5=d['md5'],
                          python_version=args.python_version)

    setuptools_build = pkginfo['setuptools']
    setuptools_run = False
    # Look at the entry_points and construct console_script and
    # gui_scripts entry_points for conda
    entry_points = pkginfo['entry_points']
    if entry_points:
        if isinstance(entry_points, str):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
            config = configparser.ConfigParser()
            entry_points = {}
            try:
                config.readfp(StringIO(newstr))
            except Exception as err:
                print("WARNING: entry-points not understood: ", err)
                print("The string was", newstr)
                entry_points = pkginfo['entry_points']
            else:
                setuptools_run = True
                for section in config.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = ['%s=%s' % (option, config.get(section, option))
                                 for option in config.options(section)]
                        entry_points[section] = value

        if not isinstance(entry_points, dict):
            print("WARNING: Could not add entry points. They were:")
            print(entry_points)
        else:
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            if isinstance(cs, string_types):
                cs = [cs]
            if isinstance(gs, string_types):
                gs = [gs]
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if set(entry_points.keys()) - {'console_scripts', 'gui_scripts'}:
                setuptools_build = True
                setuptools_run = True
            entry_list = (cs
                          # TODO: Use pythonw for these
                          + gs)
            if len(cs + gs) != 0:
                d['entry_points'] = INDENT.join([''] + entry_list)
                d['entry_comment'] = ''
                d['build_comment'] = ''
                d['test_commands'] = INDENT.join([''] +
                                                 make_entry_tests(entry_list))

    requires = get_requirements(package, pkginfo, all_extras=args.all_extras)

    if requires or setuptools_build or setuptools_run:
        deps = []
        if setuptools_run:
            deps.append('setuptools')
        for deptext in requires:
            if isinstance(deptext, string_types):
                deptext = deptext.split('\n')
            # Every item may be a single requirement
            # or a multiline requirements string...
            for dep in deptext:
                # ... and may also contain comments...
                dep = dep.split('#')[0].strip()
                if dep:  # ... and empty (or comment only) lines
                    spec = spec_from_line(dep)
                    if spec is None:
                        sys.exit("Error: Could not parse: %s" % dep)
                    deps.append(spec)

        if 'setuptools' in deps:
            setuptools_build = False
            setuptools_run = False
            d['egg_comment'] = ''
            d['build_comment'] = ''
        d['build_depends'] = INDENT.join([''] +
                                         ['setuptools'] * setuptools_build +
                                         deps)
        d['run_depends'] = INDENT.join([''] +
                                       ['setuptools'] * setuptools_run +
                                       deps)

        if args.recursive:
            for dep in deps:
                dep = dep.split()[0]
                if not exists(join(output_dir, dep)):
                    if dep not in args.created_recipes:
                        args.packages.append(dep)

    if d['build_comment'] == '':
        if args.noarch_python:
            d['noarch_python_comment'] = ''

    if 'packagename' not in d:
        d['packagename'] = pkginfo['name'].lower()
    if d['version'] == 'UNKNOWN':
        d['version'] = pkginfo['version']

    if pkginfo['packages']:
        deps = set(pkginfo['packages'])
        if d['import_tests']:
            if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                olddeps = []
            else:
                olddeps = [x for x in d['import_tests'].split()
                           if x != '-']
            deps = set(olddeps) | deps
        d['import_tests'] = INDENT.join(sorted(deps))
        d['import_comment'] = ''

        d['tests_require'] = INDENT.join(sorted([spec_from_line(pkg) for pkg
                                                 in pkginfo['tests_require']]))

    if pkginfo['homeurl'] is not None:
        d['homeurl'] = pkginfo['homeurl']
    else:
        if data and 'homeurl' in data:
            d['homeurl'] = data['homeurl']
        else:
            d['homeurl'] = "The package home page"
            d['home_comment'] = '#'

    if pkginfo['summary']:
        d['summary'] = repr(pkginfo['summary'])
    else:
        if data:
            d['summary'] = repr(data['summary'])
        else:
            d['summary'] = "Summary of the package"
            d['summary_comment'] = '#'
    if d['summary'].startswith("u'"):
        d['summary'] = d['summary'][1:]

    license_classifier = "License :: OSI Approved :: "
    if pkginfo['classifiers']:
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in pkginfo['classifiers']
                    if classifier.startswith(license_classifier)]
    elif data and 'classifiers' in data:
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
    else:
        licenses = []
    if not licenses:
        if pkginfo['license']:
            license = pkginfo['license']
        elif data and 'license' in data:
            license = data['license']
        else:
            license = None
        if license:
            if args.noprompt:
                pass
            elif '\n' not in license:
                print('Using "%s" for the license' % license)
            else:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(license)
                print()
                license = input("What license string should I use? ")
        else:
            if args.noprompt:
                license = "UNKNOWN"
            else:
                license = input(("No license could be found for %s on "
                                 "PyPI or in the source. What license "
                                 "should I use? ") % package)
    else:
        license = ' or '.join(licenses)
    d['license'] = license
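# How a license string is derived from trove classifiers above:
license_classifier = "License :: OSI Approved :: "
classifiers = ["Development Status :: 4 - Beta",
               "License :: OSI Approved :: BSD License"]
licenses = [c.split(license_classifier, 1)[1] for c in classifiers
            if c.startswith(license_classifier)]
print(' or '.join(licenses))   # BSD License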
def main(args, parser):
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()
    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    while args.packages:
        package = args.packages.pop()
        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
                                     {'packagename': package.lower(),
                                      'run_depends': '',
                                      'build_depends': '',
                                      'entry_points': '',
                                      'build_comment': '# ',
                                      'test_commands': '',
                                      'usemd5': '',
                                      'entry_comment': '#',
                                      'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s."
              % (urls[n]['url'], human_bytes(urls[n]['size'] or 0), package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for
                        classifier in data['classifiers'] if
                        classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp('conda_skeleton_' + package)
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path, 'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False
                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ", err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (cs
                                      # TODO: Use pythonw for these
                                      + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x] for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]" % ','.join(extras))
                # ... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)

                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        # or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            # ... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep:  # ... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s" % dep)
                                deps.append(spec)

                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)
                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    if not args.dry_run:
        A = input("Warning: this branch does not work. Are you sure you want "
                  "to continue [y/N]? ")
        if not A or A not in 'yY':
            sys.exit("Bailing")
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    if command == 'update':
        if not args.file:
            if not args.all and len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update' and not args.all:
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=args.json))
        if '@EXPLICIT' in specs:
            misc.explicit(specs, prefix)
            return
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            misc.install_local_packages(prefix, args.packages,
                                        verbose=not args.quiet)
            return
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not args.quiet)
            return

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist) for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata
                          if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint,
                                               update_deps=args.update_deps)
            if config.always_copy or args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n    %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n    anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n    conda install anaconda-client'

            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n    %r" % pinned_specs

            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index, show_channel_urls=args.show_channel_urls)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
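
# ---------------------------------------------------------------------------
# Illustration (added; not part of the original module): install() above
# repeatedly takes apart dist strings of the form "<name>-<version>-<build>"
# with rsplit('-', 2).  rsplit, rather than split, is what makes this safe
# for package names that themselves contain dashes:
#
#     >>> 'scikit-learn-0.16.1-np19py27_0'.rsplit('-', 2)
#     ['scikit-learn', '0.16.1', 'np19py27_0']
#
# A plain split('-', 2) would instead cut inside the package name.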
def main(args, parser):
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = "\n    - "

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(
            package,
            {
                "packagename": package.lower(),
                "run_depends": "",
                "build_depends": "",
                "entry_points": "",
                "build_comment": "# ",
                "test_commands": "",
                "usemd5": "",
            },
        )
        d["import_tests"] = valid(package).lower()
        if d["import_tests"] == "":
            d["import_comment"] = "# "
        else:
            d["import_comment"] = ""
            d["import_tests"] = indent + d["import_tests"]

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d["version"] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d["version"] = versions[0]

        data = client.release_data(package, d["version"])
        urls = client.release_urls(package, d["version"])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url["python_version"] == "source"]
        if not urls:
            if "download_url" in data:
                urls = [defaultdict(str, {"url": data["download_url"]})]
                urls[0]["filename"] = urls[0]["url"].split("/")[-1]
                d["usemd5"] = "#"
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url["url"],
                                          human_bytes(url["size"]),
                                          url["comment_text"]))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]["url"],
                                             human_bytes(urls[n]["size"]),
                                             package))

        d["pypiurl"] = urls[n]["url"]
        d["md5"] = urls[n]["md5_digest"]
        d["filename"] = urls[n]["filename"]

        d["homeurl"] = data["home_page"]
        license_classifier = "License :: OSI Approved ::"
        # Split on the classifier prefix; lstrip() would remove *characters*,
        # not the prefix, and mangles licenses such as "Apache Software License".
        licenses = [
            classifier.split(license_classifier, 1)[1]
            for classifier in data["classifiers"]
            if classifier.startswith(license_classifier)
        ]
        if not licenses:
            if data["license"]:
                if args.noprompt:
                    license = data["license"]
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data["license"])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on PyPI. "
                                    "What license should I use? " % package)
        else:
            license = " or ".join(licenses)
        d["license"] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp("conda_skeleton")
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d["filename"])
                if not isfile(download_path) or hashsum_file(download_path, "md5") != d["md5"]:
                    download(d["pypiurl"], join(SRC_CACHE, d["filename"]))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d["filename"]), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                with open(join(tempdir, "pkginfo.yaml")) as fn:
                    pkginfo = yaml.load(fn)

                uses_distribute = "setuptools" in sys.modules

                if pkginfo["install_requires"] or uses_distribute:
                    deps = [remove_version_information(dep).lower()
                            for dep in pkginfo["install_requires"]]
                    if "setuptools" in deps:
                        deps.remove("setuptools")
                        if "distribute" not in deps:
                            deps.append("distribute")
                        uses_distribute = False
                    d["build_depends"] = indent.join([""] +
                                                     ["distribute"] * uses_distribute +
                                                     deps)
                    ### Could be more discriminatory but enough
                    ### packages also need distribute at run time...
                    d["run_depends"] = indent.join([""] +
                                                   ["distribute"] * uses_distribute +
                                                   deps)

                if pkginfo["entry_points"]:
                    if not isinstance(pkginfo["entry_points"], dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(pkginfo["entry_points"])
                    else:
                        entry_list = (pkginfo["entry_points"].get("console_scripts", [])
                                      # TODO: Use pythonw for these
                                      + pkginfo["entry_points"].get("gui_scripts", []))
                        d["entry_points"] = indent.join([""] + entry_list)
                        d["build_comment"] = ""
                        d["test_commands"] = indent.join([""] + make_entry_tests(entry_list))

                if pkginfo["packages"]:
                    deps = set(pkginfo["packages"])
                    if d["import_tests"]:
                        # Strip the indent and "-" list markers before merging,
                        # so only module names end up in the set.
                        olddeps = [x for x in d["import_tests"].split() if x != "-"]
                        deps = set(olddeps) | deps
                    d["import_tests"] = indent.join([""] + list(deps))
                    d["import_comment"] = ""
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), "meta.yaml"), "w") as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), "build.sh"), "w") as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), "bld.bat"), "w") as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
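
# ---------------------------------------------------------------------------
# Hedged sketch (added; not part of the original module): the newer main()
# above normalizes setuptools entry_points that arrive as a raw INI-style
# string, e.g.
#
#     [console_scripts]
#     mytool = mypackage.cli:main
#
# into {'console_scripts': ['mytool=mypackage.cli:main']}.  The standalone
# helper below mirrors that ConfigParser branch; the name
# `normalize_entry_points` is hypothetical and unused elsewhere in this file.
def normalize_entry_points(text):
    """Parse an INI-style entry_points string into a section -> list dict."""
    from configparser import ConfigParser
    from io import StringIO

    config = ConfigParser()
    # Left-shift each line so indented metadata still parses.
    config.read_file(StringIO("\n".join(x.strip() for x in text.split("\n"))))
    return {section: ["%s=%s" % (option, config.get(section, option))
                      for option in config.options(section)]
            for section in config.sections()}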