Example #1
def download_to_cache(meta):
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta['fn']
    md5 = meta.get('md5')
    path = join(SRC_CACHE, fn)
    if not isfile(path):
        download(meta['url'], path, md5)

    if md5 and not md5_file(path) == md5:
        raise Exception("MD5 mismatch: %r" % meta)
    return path
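
Example #1 assumes a few names from its surrounding module: SRC_CACHE (the source cache directory), download(url, path, md5) and md5_file(path). A minimal sketch of what such an md5_file helper could look like, using hashlib and reading the file in chunks (the helper name and chunk size here are assumptions, not the module's actual code):

import hashlib

def md5_file(path, chunk_size=65536):
    # Hash the file in fixed-size chunks so large source tarballs do not
    # have to be read into memory at once.
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()
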
Example #2
def download_to_cache(meta):
    if not isdir(SRC_CACHE):
        os.makedirs(SRC_CACHE)

    fn = meta['fn']
    path = join(SRC_CACHE, fn)
    if not isfile(path):
        download(meta['url'], path)

    for tp in 'md5', 'sha1':
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
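
Example #2 generalizes the check to every hash listed in meta (here 'md5' and 'sha1') through a hashsum_file(path, mode) helper. A sketch of such a helper, again an assumption rather than the module's actual code, only needs hashlib.new() to pick the algorithm by name:

import hashlib

def hashsum_file(path, mode='md5', chunk_size=65536):
    # 'mode' is any algorithm name hashlib knows about: 'md5', 'sha1', ...
    h = hashlib.new(mode)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()

With helpers like these in place, a call such as download_to_cache({'fn': 'pkg-1.0.tar.gz', 'url': 'https://example.invalid/pkg-1.0.tar.gz', 'md5': '...'}) downloads the tarball once and re-verifies the cached copy on every later call (the URL and checksum are placeholders).
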
Example #3
def main(args, parser):
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
            "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package, {
            'packagename': package.lower(), 'orig_packagename': package,
            'run_depends': '', 'build_depends': '', 'entry_points': '',
            'build_comment': '# ', 'test_commands': '', 'usemd5': '',
        })
        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                    % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                    human_bytes(url['size']), url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'], package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']


        d['homeurl'] = data['home_page']
        license_classifier = "License :: OSI Approved ::"
        # Take the text after the classifier prefix; lstrip() would strip
        # matching characters rather than the prefix and can mangle the name.
        licenses = [classifier[len(license_classifier):].strip()
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
        if not licenses:
            if data['license']:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(data['license'])
                print()
                license = input("What license string should I use? ")
            else:
                license = input("No license could be found for %s on PyPI. What license should I use? " % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp('conda_skeleton')
            indent = '\n    - '

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if (not isfile(download_path) or
                        hashsum_file(download_path, 'md5') != d['md5']):
                    download(d['pypiurl'], download_path, md5=d['md5'])
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                with open(join(tempdir, 'pkginfo.yaml')) as fn:
                    pkginfo = yaml.load(fn)

                uses_distribute = 'setuptools' in sys.modules

                if pkginfo['install_requires'] or uses_distribute:
                    deps = [remove_version_information(dep) for dep in
                        pkginfo['install_requires']]
                    if 'setuptools' in deps:
                        deps.remove('setuptools')
                        if 'distribute' not in deps:
                            deps.append('distribute')
                            uses_distribute = False
                    d['build_depends'] = indent.join([''] +
                        ['distribute']*uses_distribute + deps)
                    d['run_depends'] = indent.join([''] + deps)

                if pkginfo['entry_points']:
                    if not isinstance(pkginfo['entry_points'], dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(pkginfo['entry_points'])
                    else:
                        entry_list = (
                            pkginfo['entry_points'].get('console_scripts', [])
                            # TODO: Use pythonw for these
                            + pkginfo['entry_points'].get('gui_scripts', []))
                        d['entry_points'] = indent.join([''] + entry_list)
                        d['build_comment'] = ''
                        d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))
            finally:
                rm_rf(tempdir)


    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")