def download_to_cache(meta):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % SRC_CACHE)
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta['fn']
    path = join(SRC_CACHE, fn)

    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(meta['url'], list):
            meta['url'] = [meta['url']]

        for url in meta['url']:
            try:
                print("Downloading %s" % url)
                download(url, path)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % fn)

    for tp in 'md5', 'sha1', 'sha256':
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
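# A minimal usage sketch for download_to_cache() above. The 'fn', 'url', and
# optional hash keys are the only fields the function reads; the values below
# are hypothetical placeholders, not a real package.
example_meta = {
    'fn': 'example-1.0.tar.gz',                          # file name inside SRC_CACHE
    'url': ['https://example.com/example-1.0.tar.gz'],   # one or more mirrors
    'sha256': '0' * 64,                                  # checked via hashsum_file()
}
# cached_path = download_to_cache(example_meta)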
def get_pkginfo(package, filename, pypiurl, md5, python_version):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    import yaml
    tempdir = mkdtemp('conda_skeleton_' + filename)
    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(SRC_CACHE, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)
    finally:
        rm_rf(tempdir)

    return pkginfo
def download_site_PACKAGES(output_dir):
    paths = []
    # .items() works on both Python 2 and 3 (.iteritems() was Python 2 only)
    for (site, base_url) in SITES.items():
        name = '%s.PACKAGES' % site
        path = join(output_dir, name)
        url = '/'.join((base_url, 'PACKAGES'))
        download(url, path)
        paths.append((site, path))
    # hand back the (site, path) pairs for the downloaded index files
    return paths
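# SITES itself is not shown in the snippet; the loop above only implies a
# plain site-name -> base-URL mapping. A hypothetical example shape:
#
#     SITES = {
#         'cran': 'https://cran.r-project.org/src/contrib',
#         'bioc': 'https://bioconductor.org/packages/release/bioc/src/contrib',
#     }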
def test_package_test():
    """Test calling conda build -t <package file> - rather than <recipe dir>"""
    filename = "jinja2-2.8-py{}{}_0.tar.bz2".format(sys.version_info.major,
                                                    sys.version_info.minor)
    downloaded_file = os.path.join(sys.prefix, 'conda-bld', subdir, filename)
    if not os.path.isfile(downloaded_file):
        download('https://anaconda.org/conda-forge/jinja2/2.8/download/{}/{}'.format(subdir, filename),  # noqa
                 downloaded_file)
    subprocess.check_call(["conda", "build", "--test", downloaded_file])
@responses.activate  # mocks registered with responses.add are only honored inside an activated context
def test_download_httperror(self):
    with pytest.raises(CondaRuntimeError) as execinfo:
        url = DEFAULT_CHANNEL_ALIAS
        msg = "HTTPError:"
        responses.add(responses.GET, url, body='{"error": "not found"}',
                      status=404, content_type='application/json')
        download(url, mktemp())
    assert msg in str(execinfo)
def test_windows_entry_point(self):
    """
    This emulates pip-created entry point executables on windows.  For more
    info, refer to conda/install.py::replace_entry_point_shebang
    """
    tmp_dir = tempfile.mkdtemp()
    cwd = getcwd()
    chdir(tmp_dir)
    original_prefix = "C:\\BogusPrefix\\python.exe"
    try:
        url = 'https://bitbucket.org/vinay.sajip/pyzzer/downloads/pyzzerw.pyz'
        download(url, 'pyzzerw.pyz')
        url = 'https://files.pythonhosted.org/packages/source/c/conda/conda-4.1.6.tar.gz'
        download(url, 'conda-4.1.6.tar.gz')
        subprocess.check_call(
            [sys.executable, 'pyzzerw.pyz',
             # output file
             '-o', 'conda.exe',
             # entry point
             '-m', 'conda.cli.main:main',
             # initial shebang
             '-s', '#! ' + original_prefix,
             # launcher executable to use (32-bit text should be compatible)
             '-l', 't32',
             # source archive to turn into executable
             'conda-4.1.6.tar.gz',
             ],
            cwd=tmp_dir)
        # this is the actual test: change the embedded prefix and make sure
        # that the exe runs.
        data = open('conda.exe', 'rb').read()
        fixed_data = binary_replace(data, original_prefix, sys.executable)
        with open("conda.fixed.exe", 'wb') as f:
            f.write(fixed_data)
        # without a valid shebang in the exe, this should fail
        with pytest.raises(subprocess.CalledProcessError):
            subprocess.check_call(['conda.exe', '-h'])
        process = subprocess.Popen(['conda.fixed.exe', '-h'],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output, error = process.communicate()
        output = output.decode('utf-8')
        error = error.decode('utf-8')
        assert ("conda is a tool for managing and deploying applications, "
                "environments and packages.") in output
    finally:
        chdir(cwd)
def download_to_cache(meta):
    print("Source cache directory is: %s" % SRC_CACHE)
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta["fn"]
    path = join(SRC_CACHE, fn)

    if isfile(path):
        print("Found source in cache: %s" % fn)
    else:
        print("Downloading source to cache: %s" % fn)
        download(meta["url"], path)

    for tp in "md5", "sha1":
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
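# hashsum_file() itself is not shown in any of these snippets. This is a
# minimal sketch of what a helper with that call signature could look like,
# built on hashlib; the chunked read is an implementation assumption, only
# the (path, algorithm-name) interface is taken from the calls above.
import hashlib

def hashsum_file(path, mode='md5'):
    h = hashlib.new(mode)              # 'md5', 'sha1', or 'sha256'
    with open(path, 'rb') as fi:
        while True:
            chunk = fi.read(262144)    # read in 256 KB chunks to bound memory
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()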
def download_to_cache(meta):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % SRC_CACHE)
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta['fn']
    path = join(SRC_CACHE, fn)

    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        download(meta['url'], path)

    for tp in 'md5', 'sha1', 'sha256':
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
def download_to_cache(meta, SRC_CACHE):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % SRC_CACHE)
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta['fn']
    path = join(SRC_CACHE, fn)

    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(meta['url'], list):
            meta['url'] = [meta['url']]

        for url in meta['url']:
            print("Downloading %s" % url)
            try:
                download(url, path)
            except RuntimeError:
                continue  # this mirror failed; try the next one
            break  # success
        else:  # no break: every url failed
            raise RuntimeError("Could not download %s" % fn)
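# The "for ... else" retry idiom used above trips people up, so here is a
# tiny self-contained illustration (the mirror list and success test are made
# up): the else clause runs only when the loop finishes without a break.
def first_reachable(mirrors):
    for mirror in mirrors:
        if mirror.startswith('https://'):   # stand-in for "download succeeded"
            break
    else:  # no break: nothing succeeded
        raise RuntimeError('no mirror worked')
    return mirror

assert first_reachable(['ftp://a', 'https://b']) == 'https://b'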
# Identify version of the last available tarball visible on
# http://hgdownload.cse.ucsc.edu/admin/exe and compute its sha256; place them
# in the ucsc_config.yaml file that looks like:
#
#     version: 332
#     sha256: 8c2663c7bd302a77cdf52b2e9e85e2cd

ucsc_config = yaml.load(open('ucsc_config.yaml'))
VERSION = ucsc_config['version']
SHA256 = ucsc_config['sha256']

# Download tarball if it doesn't exist. Always download FOOTER.
tarball = ('http://hgdownload.cse.ucsc.edu/admin/exe/userApps.v{0}.src.tgz'
           .format(VERSION))
if not os.path.exists(os.path.basename(tarball)):
    download(tarball, os.path.basename(tarball))
download('http://hgdownload.cse.ucsc.edu/admin/exe/linux.x86_64/FOOTER',
         'FOOTER')

# Different programs are built under different subdirectories in the source.
# So get a directory listing of the tarball.
t = tarfile.open(os.path.basename(tarball))
names = [i for i in t.getnames() if i.startswith('./userApps/kent/src')]


def program_subdir(program, names):
    """
    Identify the source directory for a program.
    """
    hits = [i for i in names if program in i and t.getmember(i).isdir()]
    if len(hits) == 0:
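# program_subdir() is cut off above at the `if len(hits) == 0:` line. Given
# its docstring and the hits list it builds, a plausible completion might be
# the following -- this body is a guess, not the original code:
#
#     if len(hits) == 0:
#         return None                      # program not found in the source tree
#     return sorted(hits, key=len)[0]      # prefer the shortest matching directory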
def download_cran_task_views_rds(output_dir):
    url = '/'.join((SITES['cran'], 'Views.rds'))
    path = join(output_dir, CRAN_VIEWS_RDS_NAME)
    download(url, path)
def chromsizes(assembly): url = ("http://hgdownload.cse.ucsc.edu/goldenPath/" "{0}/bigZips/{0}.chrom.sizes") dest = tempfile.NamedTemporaryFile(delete=False).name download(url.format(assembly), dest) return dest
def main(args, parser):
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n - '
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
                                     {'packagename': package.lower(),
                                      'run_depends': '',
                                      'build_depends': '',
                                      'entry_points': '',
                                      'build_comment': '# ',
                                      'test_commands': '',
                                      'usemd5': '',
                                      'entry_comment': '#',
                                      'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s"
                         % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s"
                      % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:"
                  % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             urls[n]['size'], package))
        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']
        d['homeurl'] = data['home_page']
        license_classifier = "License :: OSI Approved ::"
        # split() rather than lstrip(): str.lstrip(prefix) strips *characters*
        # from the given set, which can eat the start of the license name.
        licenses = [classifier.split(license_classifier, 1)[1]
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on "
                                    "PyPI. What license should I use? "
                                    % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)"
                  % package)
            tempdir = mkdtemp('conda_skeleton')
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or \
                        hashsum_file(download_path, 'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                with open(join(tempdir, 'pkginfo.yaml')) as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = 'setuptools' in sys.modules
                setuptools_run = False
                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts',
                                               'gui_scripts']:
                                    value = ['%s=%s' % (option,
                                                        config.get(section,
                                                                   option))
                                             for option in
                                             config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. "
                              "They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (cs
                                      # TODO: Use pythonw for these
                                      + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join(
                                [''] + make_entry_tests(entry_list))

                if pkginfo['install_requires'] or setuptools_build \
                        or setuptools_run:
                    deps = [remove_version_information(dep).lower()
                            for dep in pkginfo['install_requires']]
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_build + deps)
                    d['run_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_run + deps)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + list(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def chromsizes(assembly): url = "http://hgdownload.cse.ucsc.edu/goldenPath/" "{0}/bigZips/{0}.chrom.sizes" dest = tempfile.NamedTemporaryFile(delete=False).name download(url.format(assembly), dest) return dest
def test_download_connectionerror(self):
    with pytest.raises(CondaRuntimeError) as execinfo:
        url = "http://240.0.0.0/"
        msg = "Connection error:"
        download(url, mktemp())
    assert msg in str(execinfo)
def main(args, parser):
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()
    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    while args.packages:
        package = args.packages.pop()
        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
                                     {'packagename': package.lower(),
                                      'run_depends': '',
                                      'build_depends': '',
                                      'entry_points': '',
                                      'build_comment': '# ',
                                      'test_commands': '',
                                      'usemd5': '',
                                      'entry_comment': '#',
                                      'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package,
                                                       package.capitalize()))
                    args.packages.append(
                        all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s"
                         % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s"
                      % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:"
                  % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s."
              % (urls[n]['url'], human_bytes(urls[n]['size'] or 0), package))
        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']
        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1]
                        for classifier in data['classifiers']
                        if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ")
                                    % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)"
                  % package)
            tempdir = mkdtemp('conda_skeleton_' + package)
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or \
                        hashsum_file(download_path, 'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                with open(join(tempdir, 'pkginfo.yaml'),
                          encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False
                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts',
                                               'gui_scripts']:
                                    value = ['%s=%s' % (option,
                                                        config.get(section,
                                                                   option))
                                             for option in
                                             config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. "
                              "They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (cs
                                      # TODO: Use pythonw for these
                                      + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join(
                                [''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x]
                                          for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]"
                                 % ','.join(extras))
                # ... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)

                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        # or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            # ... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep:  # ... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s"
                                             % dep)
                                deps.append(spec)
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_build + deps)
                    d['run_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_run + deps)
                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def get_package_metadata(args, package, d, data):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    import yaml
    print("Downloading %s (use --no-download to skip this step)" % package)
    tempdir = mkdtemp('conda_skeleton_' + d['filename'])
    [output_dir] = args.output_dir
    if not isdir(SRC_CACHE):
        makedirs(SRC_CACHE)
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(SRC_CACHE, d['filename'])
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != d['md5']:
            download(d['pypiurl'], join(SRC_CACHE, d['filename']))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(SRC_CACHE, d['filename']), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        run_setuppy(src_dir, tempdir, args)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.load(fn)

        setuptools_build = pkginfo['setuptools']
        setuptools_run = False
        # Look at the entry_points and construct console_script and
        # gui_scripts entry_points for conda
        entry_points = pkginfo['entry_points']
        if entry_points:
            if isinstance(entry_points, str):
                # makes sure it is left-shifted
                newstr = "\n".join(x.strip()
                                   for x in entry_points.split('\n'))
                config = configparser.ConfigParser()
                entry_points = {}
                try:
                    config.readfp(StringIO(newstr))
                except Exception as err:
                    print("WARNING: entry-points not understood: ", err)
                    print("The string was", newstr)
                    entry_points = pkginfo['entry_points']
                else:
                    setuptools_run = True
                    for section in config.sections():
                        if section in ['console_scripts', 'gui_scripts']:
                            value = ['%s=%s' % (option,
                                                config.get(section, option))
                                     for option in config.options(section)]
                            entry_points[section] = value
            if not isinstance(entry_points, dict):
                print("WARNING: Could not add entry points. They were:")
                print(entry_points)
            else:
                cs = entry_points.get('console_scripts', [])
                gs = entry_points.get('gui_scripts', [])
                if isinstance(cs, string_types):
                    cs = [cs]
                if isinstance(gs, string_types):
                    gs = [gs]
                # We have *other* kinds of entry-points so we need
                # setuptools at run-time
                if set(entry_points.keys()) - {'console_scripts',
                                               'gui_scripts'}:
                    setuptools_build = True
                    setuptools_run = True
                entry_list = (cs
                              # TODO: Use pythonw for these
                              + gs)
                if len(cs + gs) != 0:
                    d['entry_points'] = INDENT.join([''] + entry_list)
                    d['entry_comment'] = ''
                    d['build_comment'] = ''
                    d['test_commands'] = INDENT.join(
                        [''] + make_entry_tests(entry_list))

        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []

        # Extract requested extra feature requirements...
        if args.all_extras:
            extras_require = list(pkginfo['extras_require'].values())
        else:
            try:
                extras_require = [pkginfo['extras_require'][x]
                                  for x in extras]
            except KeyError:
                sys.exit("Error: Invalid extra features: [%s]"
                         % ','.join(extras))
        # ... and collect all needed requirement specs in a single list:
        requires = []
        for specs in [pkginfo['install_requires']] + extras_require:
            if isinstance(specs, string_types):
                requires.append(specs)
            else:
                requires.extend(specs)

        if requires or setuptools_build or setuptools_run:
            deps = []
            if setuptools_run:
                deps.append('setuptools')
            for deptext in requires:
                # Every item may be a single requirement
                # or a multiline requirements string...
                for dep in deptext.split('\n'):
                    # ... and may also contain comments...
                    dep = dep.split('#')[0].strip()
                    if dep:  # ... and empty (or comment only) lines
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)
            if 'setuptools' in deps:
                setuptools_build = False
                setuptools_run = False
                d['egg_comment'] = ''
                d['build_comment'] = ''
            d['build_depends'] = INDENT.join(
                [''] + ['setuptools'] * setuptools_build + deps)
            d['run_depends'] = INDENT.join(
                [''] + ['setuptools'] * setuptools_run + deps)
            if args.recursive:
                for dep in deps:
                    dep = dep.split()[0]
                    if not exists(join(output_dir, dep)):
                        args.packages.append(dep)

        if 'packagename' not in d:
            d['packagename'] = pkginfo['name'].lower()
        if d['version'] == 'UNKNOWN':
            d['version'] = pkginfo['version']

        if pkginfo['packages']:
            deps = set(pkginfo['packages'])
            if d['import_tests']:
                if not d['import_tests'] or d['import_tests'] == 'PLACEHOLDER':
                    olddeps = []
                else:
                    olddeps = [x for x in d['import_tests'].split()
                               if x != '-']
                deps = set(olddeps) | deps
            d['import_tests'] = INDENT.join(sorted(deps))
            d['import_comment'] = ''

        if pkginfo['homeurl'] is not None:
            d['homeurl'] = pkginfo['homeurl']
        else:
            if data:
                d['homeurl'] = data['homeurl']
            else:
                d['homeurl'] = "The package home page"
                d['home_comment'] = '#'

        if pkginfo['summary']:
            d['summary'] = repr(pkginfo['summary'])
        else:
            if data:
                d['summary'] = repr(data['summary'])
            else:
                d['summary'] = "Summary of the package"
                d['summary_comment'] = '#'

        license_classifier = "License :: OSI Approved :: "
        if pkginfo['classifiers']:
            licenses = [classifier.split(license_classifier, 1)[1]
                        for classifier in pkginfo['classifiers']
                        if classifier.startswith(license_classifier)]
        elif data and 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1]
                        for classifier in data['classifiers']
                        if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if pkginfo['license']:
                license = pkginfo['license']
            elif data and 'license' in data:
                license = data['license']
            else:
                license = None
            if license:
                if args.noprompt:
                    pass
                elif '\n' not in license:
                    print('Using "%s" for the license' % license)
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(license)
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI or in the source. What license "
                                     "should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license
    finally:
        rm_rf(tempdir)
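# The requirement-collection loop above packs a lot into a few lines, so here
# is the same comment-stripping/splitting logic run standalone on a made-up
# multiline requirements string. spec_from_line is conda-build's own parser;
# a trivial stand-in is used here purely to keep the example self-contained.
def spec_from_line_stub(line):
    return line.lower()  # placeholder for conda_build's real spec_from_line

requires = ["numpy >=1.7  # comment\n\nSix", "requests"]
deps = []
for deptext in requires:
    for dep in deptext.split('\n'):      # items may be multiline strings...
        dep = dep.split('#')[0].strip()  # ...with trailing comments...
        if dep:                          # ...and blank lines to skip
            deps.append(spec_from_line_stub(dep))
# deps == ['numpy >=1.7', 'six', 'requests']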
def main(args, parser):
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = "\n - "
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(
            package,
            {
                "packagename": package.lower(),
                "run_depends": "",
                "build_depends": "",
                "entry_points": "",
                "build_comment": "# ",
                "test_commands": "",
                "usemd5": "",
            },
        )
        d["import_tests"] = valid(package).lower()
        if d["import_tests"] == "":
            d["import_comment"] = "# "
        else:
            d["import_comment"] = ""
            d["import_tests"] = indent + d["import_tests"]

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d["version"] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s"
                         % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s"
                      % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d["version"] = versions[0]

        data = client.release_data(package, d["version"])
        urls = client.release_urls(package, d["version"])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url["python_version"] == "source"]
        if not urls:
            if "download_url" in data:
                urls = [defaultdict(str, {"url": data["download_url"]})]
                urls[0]["filename"] = urls[0]["url"].split("/")[-1]
                d["usemd5"] = "#"
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:"
                  % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url["url"],
                                          human_bytes(url["size"]),
                                          url["comment_text"]))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]["url"],
                                             urls[n]["size"], package))
        d["pypiurl"] = urls[n]["url"]
        d["md5"] = urls[n]["md5_digest"]
        d["filename"] = urls[n]["filename"]
        d["homeurl"] = data["home_page"]
        license_classifier = "License :: OSI Approved ::"
        # split() rather than lstrip(): str.lstrip(prefix) strips *characters*
        # from the given set, which can eat the start of the license name.
        licenses = [
            classifier.split(license_classifier, 1)[1]
            for classifier in data["classifiers"]
            if classifier.startswith(license_classifier)
        ]
        if not licenses:
            if data["license"]:
                if args.noprompt:
                    license = data["license"]
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data["license"])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on "
                                    "PyPI. What license should I use? "
                                    % package)
        else:
            license = " or ".join(licenses)
        d["license"] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)"
                  % package)
            tempdir = mkdtemp("conda_skeleton")
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d["filename"])
                if not isfile(download_path) or \
                        hashsum_file(download_path, "md5") != d["md5"]:
                    download(d["pypiurl"], join(SRC_CACHE, d["filename"]))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d["filename"]), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                with open(join(tempdir, "pkginfo.yaml")) as fn:
                    pkginfo = yaml.load(fn)

                uses_distribute = "setuptools" in sys.modules

                if pkginfo["install_requires"] or uses_distribute:
                    deps = [remove_version_information(dep).lower()
                            for dep in pkginfo["install_requires"]]
                    if "setuptools" in deps:
                        deps.remove("setuptools")
                        if "distribute" not in deps:
                            deps.append("distribute")
                        uses_distribute = False
                    d["build_depends"] = indent.join(
                        [""] + ["distribute"] * uses_distribute + deps)
                    ### Could be more discriminatory but enough
                    ### packages also need distribute at run_time...
                    d["run_depends"] = indent.join(
                        [""] + ["distribute"] * uses_distribute + deps)

                if pkginfo["entry_points"]:
                    if not isinstance(pkginfo["entry_points"], dict):
                        print("WARNING: Could not add entry points. "
                              "They were:")
                        print(pkginfo["entry_points"])
                    else:
                        entry_list = (
                            pkginfo["entry_points"].get("console_scripts", [])
                            # TODO: Use pythonw for these
                            + pkginfo["entry_points"].get("gui_scripts", [])
                        )
                        d["entry_points"] = indent.join([""] + entry_list)
                        d["build_comment"] = ""
                        d["test_commands"] = indent.join(
                            [""] + make_entry_tests(entry_list))

                if pkginfo["packages"]:
                    deps = set(pkginfo["packages"])
                    if d["import_tests"]:
                        deps = set([d["import_tests"]]) | deps
                    d["import_tests"] = indent.join([""] + list(deps))
                    d["import_comment"] = ""
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), "meta.yaml"), "w") as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), "build.sh"), "w") as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), "bld.bat"), "w") as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")