Ejemplo n.º 1
0
def rm_tarballs(args):
    """Remove cached conda tarballs (``.tar.bz2`` and partial downloads).

    Scans the first package cache directory (config.pkgs_dirs[0]),
    prints a size table, asks for confirmation via common.confirm_yn,
    then unlinks each file. Exits the process (code 0) when there is
    nothing to remove.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments, forwarded to common.confirm_yn().
    """
    from os.path import join, getsize

    pkgs_dir = config.pkgs_dirs[0]
    print('Cache location: %s' % pkgs_dir)

    # str.endswith accepts a tuple, so one call covers both the finished
    # tarballs and interrupted partial downloads.
    rmlist = [fn for fn in os.listdir(pkgs_dir)
              if fn.endswith(('.tar.bz2', '.tar.bz2.part'))]

    if not rmlist:
        print("There are no tarballs to remove")
        sys.exit(0)

    print("Will remove the following tarballs:")
    print()
    totalsize = 0
    # Size the name column from the longest filename so the divider
    # (maxlen + 2 + 10 below) matches the actual table width; the old
    # hard-coded %-40s column disagreed with the divider for long names.
    maxlen = max(len(fn) for fn in rmlist)
    fmt = "%%-%ds %%10s" % maxlen
    for fn in rmlist:
        size = getsize(join(pkgs_dir, fn))
        totalsize += size
        print(fmt % (fn, human_bytes(size)))
    print('-' * (maxlen + 2 + 10))
    print(fmt % ('Total:', human_bytes(totalsize)))
    print()

    common.confirm_yn(args)

    for fn in rmlist:
        print("removing %s" % fn)
        os.unlink(os.path.join(pkgs_dir, fn))
Ejemplo n.º 2
0
def display_actions(actions, index=None):
    """Print a human-readable summary of planned fetch/unlink/link actions."""
    fetches = actions.get(FETCH)
    if fetches:
        print("\nThe following packages will be downloaded:\n")
        rows = []
        for dist in fetches:
            if index:
                size_col = "%15s" % human_bytes(index[dist + ".tar.bz2"]["size"])
            else:
                size_col = None
            rows.append((dist, size_col))
        print_dists(rows)
        # Grand total only makes sense with an index and several packages.
        if index and len(fetches) > 1:
            total = sum(index[dist + ".tar.bz2"]["size"] for dist in fetches)
            print(" " * 4 + "-" * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(total))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([(dist, None) for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        link_rows = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            link_rows.append((dist, "   %s" % install.link_name_map.get(lt)))
        print_dists(link_rows)
    print()
Ejemplo n.º 3
0
def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
            verbose=True):
    """Delete unused package directories from the cache after confirmation.

    Prints an optional size report, confirms with the user unless JSON
    mode is active, returns early for a JSON dry run, then rm_rf's each
    package directory.
    """
    from conda.install import rm_rf

    if verbose:
        print('Cache location: %s' % pkgs_dir)
        for _fn, exc in warnings:
            print(exc)

    if not rmlist:
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        print()
        # Longest entry width drives the divider length only.
        widest = max(len(str(entry)) for entry in rmlist)
        row_fmt = "%-40s %10s"
        for pkg_name, nbytes in zip(rmlist, pkgsizes):
            print(row_fmt % (pkg_name, human_bytes(nbytes)))
        print('-' * (widest + 2 + 10))
        print(row_fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkg_name in rmlist:
        if verbose:
            print("removing %s" % pkg_name)
        rm_rf(join(pkgs_dir, pkg_name))
Ejemplo n.º 4
0
File: plan.py — Project: jschaf/conda
def display_actions(actions, index=None):
    """Print planned fetch/unlink/link actions in human-readable form.

    Parameters
    ----------
    actions : dict
        Maps FETCH/UNLINK/LINK action keys to lists of dist strings
        (LINK entries are link-args parsed by split_linkarg).
    index : dict, optional
        Repodata keyed by '<dist>.tar.bz2'; used for sizes and channels.
        NOTE(review): the FETCH branch indexes ``index`` unconditionally,
        so it must not be None when there are fetch actions.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            # Optionally append the canonical channel name to each row.
            if config.show_channel_urls:
                extra += '  %s' % config.canonical_channel_name(
                                       info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Grand total only when more than one package is fetched.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([
                (dist, None)
                for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # Human-readable link type (e.g. hard-link / soft-link / copy).
            extra = '   %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
Ejemplo n.º 5
0
def rm_tarballs(args):
    """Remove all cached conda tarballs from the first package cache.

    Scans config.pkgs_dirs[0] for ``.tar.bz2`` files (and partial
    ``.tar.bz2.part`` downloads), prints a size table, asks for
    confirmation via common.confirm_yn, then unlinks each file.
    Exits the process (code 0) when there is nothing to remove.
    """
    from os.path import join, getsize

    pkgs_dir = config.pkgs_dirs[0]
    print('Cache location: %s' % pkgs_dir)

    # Collect both finished tarballs and interrupted partial downloads.
    rmlist = []
    for fn in os.listdir(pkgs_dir):
        if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
            rmlist.append(fn)

    if not rmlist:
        print("There are no tarballs to remove")
        sys.exit(0)

    print("Will remove the following tarballs:")
    print()
    totalsize = 0
    # Width of the longest filename; used only to size the divider line.
    # NOTE(review): the row format below hard-codes a 40-char column, so
    # rows and divider can disagree for very long names — confirm intent.
    maxlen = len(max(rmlist, key=lambda x: len(str(x))))
    fmt = "%-40s %10s"
    for fn in rmlist:
        size = getsize(join(pkgs_dir, fn))
        totalsize += size
        print(fmt % (fn, human_bytes(size)))
    print('-' * (maxlen + 2 + 10))
    print(fmt % ('Total:', human_bytes(totalsize)))
    print()

    common.confirm_yn(args)

    for fn in rmlist:
        print("removing %s" % fn)
        os.unlink(os.path.join(pkgs_dir, fn))
Ejemplo n.º 6
0
def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
    """Delete the given tarball filenames from a single package cache.

    Parameters
    ----------
    args : argparse.Namespace
        Must provide ``json`` and ``dry_run``: confirmation is skipped in
        JSON mode, and a JSON dry run returns before deleting anything.
    pkgs_dir : str
        Package cache directory containing the tarballs.
    rmlist : list of str
        Tarball filenames to remove.
    totalsize : int
        Combined size in bytes, used only for the printed summary.
    verbose : bool, optional
        When False, suppress all console output.
    """
    if verbose:
        print('Cache location: %s' % pkgs_dir)

    if not rmlist:
        if verbose:
            print("There are no tarballs to remove")
        return

    if verbose:
        print("Will remove the following tarballs:")
        print()

        # Longest filename; only used to size the divider line below.
        maxlen = len(max(rmlist, key=lambda x: len(str(x))))
        fmt = "%-40s %10s"
        for fn in rmlist:
            size = getsize(join(pkgs_dir, fn))
            print(fmt % (fn, human_bytes(size)))
        print('-' * (maxlen + 2 + 10))
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for fn in rmlist:
        if verbose:
            print("removing %s" % fn)
        os.unlink(os.path.join(pkgs_dir, fn))
Ejemplo n.º 7
0
def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
    """Delete the listed tarballs from ``pkgs_dir``.

    Confirmation is skipped in JSON mode; a JSON dry run returns before
    deleting anything. With ``verbose=False`` no console output is made.
    """
    if verbose:
        print('Cache location: %s' % pkgs_dir)

    if not rmlist:
        if verbose:
            print("There are no tarballs to remove")
        return

    if verbose:
        print("Will remove the following tarballs:")
        print()

        # Divider width tracks the longest filename.
        widest = max(len(str(item)) for item in rmlist)
        row = "%-40s %10s"
        for tarball in rmlist:
            print(row % (tarball,
                         human_bytes(getsize(join(pkgs_dir, tarball)))))
        print('-' * (widest + 2 + 10))
        print(row % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for tarball in rmlist:
        if verbose:
            print("removing %s" % tarball)
        os.unlink(os.path.join(pkgs_dir, tarball))
Ejemplo n.º 8
0
def display_actions(actions, index=None):
    """Print planned fetch/unlink/link actions in human-readable form.

    NOTE(review): the FETCH branch indexes ``index`` unconditionally, so
    it must not be None when there are fetch actions.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            # Optionally append the canonical channel name to each row.
            if config.show_channel_urls:
                extra += '  %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Grand total only when more than one package is fetched.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(
                sum(index[dist + '.tar.bz2']['size']
                    for dist in actions[FETCH])))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([(dist, None) for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # Human-readable link type (e.g. hard-link / soft-link / copy).
            extra = '   %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
Ejemplo n.º 9
0
def rm_pkgs(args):
    """Remove cached packages that are not hard-linked into any environment.

    A package directory is considered unused when none of its files has a
    link count above 1 (i.e. no environment shares any of its files).
    Prints a size table, confirms with the user, then rm_rf's each unused
    package directory. Exits the process (code 0) when nothing is
    removable.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    from os.path import join, isdir
    from os import lstat, walk, listdir
    from conda.install import rm_rf

    pkgs_dir = config.pkgs_dirs[0]
    print('Cache location: %s' % pkgs_dir)

    rmlist = []
    # Only directories that contain an info/ subdirectory are packages.
    pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
        # Only include actual packages
        isdir(join(pkgs_dir, i, 'info'))]
    for pkg in pkgs:
        breakit = False
        # for/else: the else clause runs only when the walk completes
        # without breaking, i.e. no multiply-linked file was found and
        # the package is therefore unused.
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            if breakit:
                break
            for fn in files:
                try:
                    stat = lstat(join(root, fn))
                except OSError as e:
                    # Unreadable entry: report it but keep scanning.
                    print(e)
                    continue
                if stat.st_nlink > 1:
                    # print('%s is installed: %s' % (pkg, join(root, fn)))
                    breakit = True
                    break
        else:
            rmlist.append(pkg)

    if not rmlist:
        print("There are no unused packages to remove")
        sys.exit(0)

    print("Will remove the following packages:")
    print()
    totalsize = 0
    # Longest package name; used only to size the divider line.
    maxlen = len(max(rmlist, key=lambda x: len(str(x))))
    fmt = "%-40s %10s"
    for pkg in rmlist:
        pkgsize = 0
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            for fn in files:
                # We don't have to worry about counting things twice:  by
                # definition these files all have a link count of 1!
                size = lstat(join(root, fn)).st_size
                totalsize += size
                pkgsize += size
        print(fmt % (pkg, human_bytes(pkgsize)))
    print('-' * (maxlen + 2 + 10))
    print(fmt % ('Total:', human_bytes(totalsize)))
    print()

    common.confirm_yn(args)

    for pkg in rmlist:
        print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
Ejemplo n.º 10
0
def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
            verbose=True):
    """Remove the given unused package directories from the cache.

    Optional console report, confirmation unless --json, early return on
    a JSON dry run, then rm_rf of each package directory.
    """
    if verbose:
        print('Cache location: %s' % pkgs_dir)
        for _fn, exc in warnings:
            print(exc)

    if not rmlist:
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        print()
        # Divider width tracks the longest package name.
        widest = max(len(str(item)) for item in rmlist)
        row = "%-40s %10s"
        for name, nbytes in zip(rmlist, pkgsizes):
            print(row % (name, human_bytes(nbytes)))
        print('-' * (widest + 2 + 10))
        print(row % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for name in rmlist:
        if verbose:
            print("removing %s" % name)
        rm_rf(join(pkgs_dir, name))
Ejemplo n.º 11
0
def get_download_data(args, client, package, version, is_url):
    """Resolve the source download (url, filename, md5) for a PyPI package.

    Parameters
    ----------
    args : argparse.Namespace
        Needs ``all_urls``, ``noprompt`` and ``manual_url``.
    client : xmlrpc client
        PyPI client used for release_data/release_urls lookups.
    package : str
        Package name, or a direct URL when ``is_url`` is true.
    version : str
        Release version to look up (ignored for direct URLs).
    is_url : bool
        Treat ``package`` as a URL instead of querying PyPI.

    Returns
    -------
    tuple
        (release data dict or None, download url, filename, md5 string).

    Exits the process via sys.exit when no usable source URL is found.
    """
    data = client.release_data(package, version) if not is_url else None
    urls = client.release_urls(package, version) if not is_url else [package]
    if not is_url and not args.all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['python_version'] == 'source']
    if not urls:
        if 'download_url' in data:
            # Fall back to the release's download_url field.
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" %
                         (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            md5 = fragment[len('md5='):] if fragment.startswith('md5=') else ''
        else:
            sys.exit("Error: No source urls found for %s" % package)
    if len(urls) > 1 and not args.noprompt:
        print("More than one source version is available for %s:" % package)
        if args.manual_url:
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("which version should i use? "))
        else:
            print("Using the one with the least source size")
            print("use --manual-url to override this behavior")
            # min over (size, index) pairs picks the smallest tarball;
            # a generator is enough, no list needed.
            min_siz, n = min((url['size'], i) for i, url in enumerate(urls))
    else:
        n = 0

    if not is_url:
        print("Using url %s (%s) for %s." % (urls[n]['url'],
              human_bytes(urls[n]['size'] or 0), package))
        pypiurl = urls[n]['url']
        md5 = urls[n]['md5_digest']
        # Guard against an empty/missing filename, matching both the
        # download_url fallback above and the sibling implementation.
        filename = urls[n]['filename'] or 'package'
    else:
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        if U.fragment and U.fragment.startswith('md5='):
            md5 = U.fragment[len('md5='):]
        else:
            md5 = ''
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, md5)
Ejemplo n.º 12
0
def get_download_data(args, client, package, version, is_url):
    """Resolve the source download for a PyPI package.

    Returns a tuple ``(data, pypiurl, filename, md5)`` where ``data`` is
    the PyPI release_data dict (None for direct URLs), ``pypiurl`` the
    chosen download URL, ``filename`` the name to save as, and ``md5``
    the checksum string ('' when unknown).

    Exits the process via sys.exit when no usable source URL is found.
    """
    data = client.release_data(package, version) if not is_url else None
    urls = client.release_urls(package, version) if not is_url else [package]
    if not is_url and not args.all_urls:
        # Try to find source urls
        urls = [url for url in urls if url['python_version'] == 'source']
    if not urls:
        if 'download_url' in data:
            # Fall back to the release's download_url field.
            urls = [defaultdict(str, {'url': data['download_url']})]
            if not urls[0]['url']:
                # The package doesn't have a url, or maybe it only has a wheel.
                sys.exit("Error: Could not build recipe for %s. "
                         "Could not find any valid urls." % package)
            U = parse_url(urls[0]['url'])
            if not U.path:
                sys.exit("Error: Could not parse url for %s: %s" %
                         (package, U))
            urls[0]['filename'] = U.path.rsplit('/')[-1]
            fragment = U.fragment or ''
            if fragment.startswith('md5='):
                md5 = fragment[len('md5='):]
            else:
                md5 = ''
        else:
            sys.exit("Error: No source urls found for %s" % package)
    if len(urls) > 1 and not args.noprompt:
        print("More than one source version is available for %s:" % package)
        if args.manual_url:
            # Let the user pick interactively.
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" %
                      (i, url['url'], human_bytes(
                          url['size']), url['comment_text']))
            n = int(input("which version should i use? "))
        else:
            # Default: pick the smallest source distribution.
            print("Using the one with the least source size")
            print("use --manual-url to override this behavior")
            min_siz, n = min([(url['size'], i)
                              for (i, url) in enumerate(urls)])
    else:
        n = 0

    if not is_url:
        print("Using url %s (%s) for %s." %
              (urls[n]['url'], human_bytes(urls[n]['size'] or 0), package))
        pypiurl = urls[n]['url']
        md5 = urls[n]['md5_digest']
        # Guard against an empty/missing filename field.
        filename = urls[n]['filename'] or 'package'
    else:
        print("Using url %s" % package)
        pypiurl = package
        U = parse_url(package)
        if U.fragment and U.fragment.startswith('md5='):
            md5 = U.fragment[len('md5='):]
        else:
            md5 = ''
        # TODO: 'package' won't work with unpack()
        filename = U.path.rsplit('/', 1)[-1] or 'package'

    return (data, pypiurl, filename, md5)
Ejemplo n.º 13
0
def pretty_package(pkg):
    """Pretty-print one package: header, aligned field table, environments
    where the app is installed, and its dependency list."""
    import conda.config as config
    from conda.utils import human_bytes
    from conda.api import app_is_installed

    # Fixed fields first, in display order.
    d = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', config.canonical_channel_name(pkg.channel)),
        ('size', human_bytes(pkg.info['size'])),
        ])
    rest = pkg.info.copy()
    for key in sorted(rest):
        # Skip fields already shown above (or rendered separately below).
        if key in ['build', 'depends', 'requires', 'channel', 'name',
            'version', 'build_number', 'size']:
            continue
        d[key] = rest[key]


    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print("installed environments:")
    for env in app_is_installed(pkg.fn):
        print('    %s' % env)
    print('dependencies:')
    for dep in pkg.info['depends']:
        print('    %s' % dep)
Ejemplo n.º 14
0
def pretty_package(pkg):
    """Pretty-print a package record: header, aligned field table, and
    dependency list."""
    from conda.utils import human_bytes
    from conda.config import canonical_channel_name

    # Fixed fields first, in display order.
    details = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', canonical_channel_name(pkg.channel)),
        ('size', human_bytes(pkg.info['size'])),
    ])
    skip = {'build', 'depends', 'requires', 'channel', 'name', 'version',
            'build_number', 'size'}
    for key in sorted(pkg.info):
        if key not in skip:
            details[key] = pkg.info[key]

    print()
    header = "%s %s %s" % (details['name'], details['version'],
                           details['build string'])
    print(header)
    print('-' * len(header))
    for key in details:
        print("%-12s: %s" % (key, details[key]))
    print('dependencies:')
    for dep in pkg.info['depends']:
        print('    %s' % dep)
Ejemplo n.º 15
0
def pretty_package(dist, pkg):
    """Print a formatted report for one package record: header, field
    table, and dependencies."""
    from conda.utils import human_bytes

    record = dump_record(pkg)
    # Fixed fields first, in display order.
    fields = OrderedDict([
        ('file name', dist.to_filename()),
        ('name', record['name']),
        ('version', record['version']),
        ('build string', record['build']),
        ('build number', record['build_number']),
        ('channel', dist.channel),
        ('size', human_bytes(record['size'])),
    ])
    for key in sorted(set(record.keys()) - SKIP_FIELDS):
        fields[key] = record[key]

    print()
    header = "%s %s %s" % (fields['name'], fields['version'],
                           fields['build string'])
    print(header)
    print('-' * len(header))
    for key in fields:
        print("%-12s: %s" % (key, fields[key]))
    print('dependencies:')
    for dep in record['depends']:
        print('    %s' % dep)
Ejemplo n.º 16
0
def pretty_package(pkg):
    """Pretty-print one package: header line, aligned field table, and
    the dependency list."""
    from conda.utils import human_bytes
    from conda.models.channel import Channel

    # Fixed fields first, in display order.
    d = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', Channel(pkg.channel).canonical_name),
        ('size', human_bytes(pkg.info['size'])),
    ])
    rest = pkg.info
    for key in sorted(rest):
        # Skip fields already shown above (or rendered separately below).
        if key in {'build', 'depends', 'requires', 'channel', 'name',
                   'version', 'build_number', 'size'}:
            continue
        d[key] = rest[key]

    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print('dependencies:')
    for dep in pkg.info['depends']:
        print('    %s' % dep)
Ejemplo n.º 17
0
def pretty_package(dist, pkg):
    """Pretty-print a package record: header, aligned field table, and
    dependency list.

    Parameters
    ----------
    dist
        Provides the file name (to_filename()) and channel.
    pkg
        Package record; flattened via dump_record before display.
    """
    from conda.utils import human_bytes

    pkg = dump_record(pkg)
    # Fixed fields first, in display order.
    d = OrderedDict([
        ('file name', dist.to_filename()),
        ('name', pkg['name']),
        ('version', pkg['version']),
        ('build string', pkg['build']),
        ('build number', pkg['build_number']),
        ('channel', dist.channel),
        ('size', human_bytes(pkg['size'])),
    ])
    # Remaining record fields, minus those already shown / skipped.
    for key in sorted(set(pkg.keys()) - SKIP_FIELDS):
        d[key] = pkg[key]

    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print('dependencies:')
    for dep in pkg['depends']:
        print('    %s' % dep)
Ejemplo n.º 18
0
File: plan.py — Project: dmj111/conda
def print_dists(dists, index=None):
    """Print a two-column table of dists (name | build); append the size
    column when the dist is found in ``index``."""
    fmt = "    %-27s|%17s"
    print(fmt % ('package', 'build'))
    print(fmt % ('-' * 27, '-' * 17))
    for dist in dists:
        name, build = dist.rsplit('-', 1)
        row = fmt % (name, build)
        fn = dist + '.tar.bz2'
        if index and fn in index:
            row += '%15s' % human_bytes(index[fn]['size'])
        print(row)
Ejemplo n.º 19
0
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """Delete tarballs across multiple package cache directories.

    Parameters
    ----------
    args : argparse.Namespace
        Needs ``json`` and ``dry_run``; confirmation is skipped in JSON
        mode, and a JSON dry run returns before deleting anything.
    pkgs_dirs : dict
        Maps cache directory path -> list of tarball filenames to remove.
    totalsize : int
        Combined size in bytes, used only for the printed summary.
    verbose : bool, optional
        When False, suppress all console output.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return

    # Hoisted out of the per-directory loop: the old code assigned fmt
    # inside the loop but used it after the loop, which linters flag as
    # possibly-undefined and which breaks if the loop body ever changes.
    fmt = "%-40s %10s"
    if verbose:
        print("Will remove the following tarballs:")
        print()

        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            path = os.path.join(pkgs_dir, fn)
            # Skip files we cannot delete instead of raising.
            if os.access(path, os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(path)
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
Ejemplo n.º 20
0
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Report and delete conda-build source cache directories after
    confirmation; any warning aborts the whole operation."""
    verbose = not args.json
    if warnings:
        # Warnings mean the scan was unreliable: print them (unless in
        # JSON mode) and bail out without removing anything.
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return

    for cache_type, cache_path in cache_dirs.items():
        print("%s (%s)" % (cache_type, cache_path))
        print("%-40s %10s" % ("Size:", human_bytes(cache_sizes[cache_type])))
        print()

    print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for cache_path in cache_dirs.values():
        print("Removing %s" % cache_path)
        rm_rf(cache_path)
Ejemplo n.º 21
0
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Remove conda-build source cache directories after confirmation.

    Parameters
    ----------
    args : argparse.Namespace
        Needs ``json`` and ``dry_run``.
    cache_dirs : dict
        Maps cache type name -> directory path.
    warnings : list
        If non-empty, they are printed to stderr (unless JSON mode) and
        the function returns WITHOUT removing anything.
    cache_sizes : dict
        Maps cache type name -> size in bytes.
    total_size : int
        Combined size for the summary line.
    """
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return

    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:", human_bytes(cache_sizes[cache_type])))
        print()

    print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
Ejemplo n.º 22
0
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """Remove unused cached packages from multiple cache directories.

    Parameters
    ----------
    args : argparse.Namespace
        Needs ``json`` and ``dry_run``.
    pkgs_dirs : dict
        Maps cache directory -> list of package directory names to remove.
    warnings : list of (filename, exception)
        Scan problems; the exception part is printed per cache dir.
    totalsize : int
        Combined size in bytes for the summary line.
    pkgsizes : dict
        Maps cache directory -> list of sizes parallel to pkgs_dirs[dir].
    verbose : bool, optional
        When False, suppress all console output.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
            for fn, exception in warnings:
                print(exception)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return

    # Hoisted out of the per-directory loop: the old code assigned fmt
    # inside the loop but used it after the loop, which linters flag as
    # possibly-undefined and which breaks if the loop body ever changes.
    fmt = "%-40s %10s"
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
Ejemplo n.º 23
0
def display_actions(actions, index=None):
    """Summarize fetch/unlink/link actions on stdout.

    NOTE(review): the FETCH branch indexes ``index`` unconditionally, so
    it must not be None when there are fetch actions.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        rows = []
        for dist in actions[FETCH]:
            size_col = "%15s" % human_bytes(index[dist + ".tar.bz2"]["size"])
            rows.append((dist, size_col))
        print_dists(rows)
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        unlink_rows = [(dist, None) for dist in actions[UNLINK]]
        print_dists(unlink_rows)
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        link_rows = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            note = "   %s (%d)" % (install.link_name_map.get(lt),
                                   config.pkgs_dirs.index(pkgs_dir))
            link_rows.append((dist, note))
        print_dists(link_rows)
    print()
Ejemplo n.º 24
0
def display_actions(actions, index=None):
    """Print planned fetch/unlink/link actions in human-readable form.

    Sizes for fetched packages are shown only when ``index`` is given.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        print_dists([
                (dist,
                 ('%15s' % human_bytes(index[dist + '.tar.bz2']['size']))
                 if index else None)
                for dist in actions[FETCH]])
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([
                (dist, None)
                for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # Human-readable link type (e.g. hard-link / soft-link / copy).
            extra = '   %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
Ejemplo n.º 25
0
def display_actions(actions, index, show_channel_urls=None):
    """Print a full plan report: download table, then per-package
    INSTALLED / REMOVED / UPDATED / DOWNGRADED sections with aligned
    columns (version, channel, features, link type).

    Parameters
    ----------
    actions : dict
        Plan mapping inst.FETCH / inst.LINK / inst.UNLINK to dist lists.
    index : dict
        Repodata index keyed by '<dist>.tar.bz2'.
    show_channel_urls : bool or None, optional
        Overrides the global config_show_channel_urls; None means "show
        unless the channel is 'defaults'".
    """
    if show_channel_urls is None:
        show_channel_urls = config_show_channel_urls

    def channel_str(s):
        # Render a channel name according to the tri-state setting above.
        if s is None:
            return ''
        if show_channel_urls is None:
            return '' if s == 'defaults' else s
        return s if show_channel_urls else ''

    def channel_len(s):
        return len(channel_str(s))

    if actions.get(inst.FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[inst.FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            schannel = channel_str(info.get('schannel', '<unknown>'))
            if schannel:
                extra += '  ' + schannel
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[inst.FETCH]) > 1:
            num_bytes = sum(index[dist + '.tar.bz2']['size']
                            for dist in actions[inst.FETCH])
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(inst.LINK, []):
        dist, lt = inst.split_linkarg(arg)
        rec = index[dist + '.tar.bz2']
        pkg = rec['name']
        packages[pkg][1] = rec['version'] + '-' + rec['build']
        dist = pkg + '-' + packages[pkg][1]
        Packages[dist] = Package(dist + '.tar.bz2', rec)
        linktypes[pkg] = lt
        features[pkg][1] = rec.get('features', '')
    for arg in actions.get(inst.UNLINK, []):
        dist, lt = inst.split_linkarg(arg)
        rec = index.get(dist + '.tar.bz2')
        if rec is None:
            # Not found in the index: reconstruct a minimal record from
            # the dist string itself.
            pkg, ver, build = dist.split('::', 2)[-1].rsplit('-', 2)
            rec = dict(name=pkg,
                       version=ver,
                       build=build,
                       channel=None,
                       schannel='<unknown>',
                       build_number=int(build) if build.isdigit() else 0)
        pkg = rec['name']
        packages[pkg][0] = rec['version'] + '-' + rec['build']
        dist = pkg + '-' + packages[pkg][0]
        Packages[dist] = Package(dist + '.tar.bz2', rec)
        features[pkg][0] = rec.get('features', '')

    # Compute column widths for the aligned report below.
    #                     Put a minimum length here---.    .--For the :
    #                                                 v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(
        max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewver = len(
        max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(
        max(features.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(
        max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])

    maxoldchannel = max([
        channel_len(Packages[p + '-' + packages[p][0]].schannel)
        for p in packages if packages[p][0]
    ] or [0])
    maxnewchannel = max([
        channel_len(Packages[p + '-' + packages[p][1]].schannel)
        for p in packages if packages[p][1]
    ] or [0])
    # Classify: no old version -> new install; no new version -> removal.
    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if maxoldchannel:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if maxnewchannel:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = ((P0.name, P0.norm_version, P0.build_number) <=
                     (P1.name, P1.norm_version, P1.build_number))
        except TypeError:
            newer = ((P0.name, P0.version, P0.build_number) <=
                     (P1.name, P1.version, P1.build_number))
        if newer or str(P1.version) == 'custom':
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    # NOTE(review): 'format' shadows the builtin; it is local to this
    # function only.
    def format(s, pkg):
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = channel_str(Packages[pkg + '-' +
                                                  packages[pkg][i]].schannel)
        return lead + s.format(pkg=pkg + ':',
                               vers=packages[pkg],
                               channel=channel,
                               features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
    for pkg in sorted(new):
        print(format(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
    for pkg in sorted(removed):
        print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
    for pkg in sorted(updated):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
    for pkg in sorted(downgraded):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
Ejemplo n.º 26
0
def main(args, parser):
    """Generate conda recipe skeletons for one or more PyPI packages.

    For each package in ``args.packages`` this queries the PyPI XML-RPC
    API for release metadata, optionally downloads and unpacks the sdist
    (``args.download``) to extract dependencies and entry points from
    setup.py, and writes ``meta.yaml``, ``build.sh`` and ``bld.bat``
    into ``<output_dir>/<package-lowercase>``.

    Raises RuntimeError if a recipe directory already exists; calls
    sys.exit() on missing versions or missing source URLs.
    """
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
            "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitution dict; the *_comment entries toggle whole
        # lines in the rendered meta.yaml on/off ('# ' comments them out).
        d = package_dicts.setdefault(package, {'packagename':
            package.lower(), 'run_depends':'',
            'build_depends':'', 'entry_points':'', 'build_comment':'# ',
            'test_commands':'', 'usemd5':'', 'entry_comment':'#', 'egg_comment':'#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent+d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                    % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                # No md5 is available for a bare download_url, so comment
                # the md5 line out of the recipe.
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                    human_bytes(url['size']), url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'], package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']


        d['homeurl'] = data['home_page']
        license_classifier = "License :: OSI Approved ::"
        # BUG FIX: str.lstrip(prefix) strips a *character set*, not a prefix,
        # and mangled names (e.g. "Apache Software License" -> "ache Software
        # License").  Slice the known prefix off instead.
        licenses = [classifier[len(license_classifier):].strip()
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on PyPI. What license should I use? " % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp('conda_skeleton')

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary objects; pkginfo.yaml is generated
                # locally by patch_distutils, but yaml.safe_load would be
                # the safer choice here.
                with open(join(tempdir, 'pkginfo.yaml')) as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = 'setuptools' in sys.modules
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                #  gui_scripts entry_points for conda and
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip() for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ", err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                                 for option in config.options(section) ]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts',[])
                        # We have *other* kinds of entry-points so we need setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs+gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))


                if pkginfo['install_requires'] or setuptools_build or setuptools_run:
                    deps = [remove_version_information(dep).lower() for dep in
                        pkginfo['install_requires']]
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                        ['setuptools']*setuptools_build + deps)
                    d['run_depends'] = indent.join([''] +
                        ['setuptools']*setuptools_run + deps)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split() if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + list(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)


    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'),
            'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 27
0
def rm_pkgs(args):
    """Remove unused packages from the package cache.

    A package directory under ``config.pkgs_dirs[0]`` is considered "in
    use" when any file inside it has a hard-link count greater than 1
    (i.e. it is linked into some environment); all other package
    directories are listed, the user is asked to confirm, and they are
    then deleted with rm_rf.  Exits via sys.exit(0) when nothing is
    removable.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    from os.path import join, isdir
    from os import lstat, walk, listdir
    from conda.install import rm_rf

    pkgs_dir = config.pkgs_dirs[0]
    print('Cache location: %s' % pkgs_dir)

    rmlist = []
    pkgs = [
        i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
        # Only include actual packages
        isdir(join(pkgs_dir, i, 'info'))
    ]
    for pkg in pkgs:
        in_use = False
        for root, dirs, files in walk(join(pkgs_dir, pkg)):
            for fn in files:
                try:
                    st = lstat(join(root, fn))
                except OSError as e:
                    # Unreadable entries are skipped rather than aborting
                    # the whole scan.
                    print(e)
                    continue
                if st.st_nlink > 1:
                    # print('%s is installed: %s' % (pkg, join(root, fn)))
                    in_use = True
                    break
            if in_use:
                break
        # BUG FIX: the previous for/else + flag formulation still appended
        # the package when the in-use file was found in the *last* directory
        # yielded by walk(), because the outer loop then completed normally
        # and its `else` clause ran anyway.
        if not in_use:
            rmlist.append(pkg)

    if not rmlist:
        print("There are no unused packages to remove")
        sys.exit(0)

    print("Will remove the following packages:")
    print()
    totalsize = 0
    maxlen = len(max(rmlist, key=lambda x: len(str(x))))
    fmt = "%-40s %10s"
    for pkg in rmlist:
        pkgsize = 0
        for root, dirs, files in walk(join(pkgs_dir, pkg)):
            for fn in files:
                # We don't have to worry about counting things twice:  by
                # definition these files all have a link count of 1!
                size = lstat(join(root, fn)).st_size
                totalsize += size
                pkgsize += size
        print(fmt % (pkg, human_bytes(pkgsize)))
    print('-' * (maxlen + 2 + 10))
    print(fmt % ('Total:', human_bytes(totalsize)))
    print()

    common.confirm_yn(args)

    for pkg in rmlist:
        print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
Ejemplo n.º 28
0
def main(args, parser):
    """Generate conda recipe skeletons for one or more PyPI packages
    (distribute-era variant).

    Queries the PyPI XML-RPC API for release metadata, optionally
    downloads and unpacks the sdist (``args.download``) to extract
    dependencies/entry points from setup.py, and writes ``meta.yaml``,
    ``build.sh`` and ``bld.bat`` into ``<output_dir>/<package-lowercase>``.

    Raises RuntimeError if a recipe directory already exists; calls
    sys.exit() on missing versions or missing source URLs.
    """
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = "\n    - "

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without " "--no-download is not recommended")
    for package in args.packages:
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitution dict; the *_comment entries toggle whole
        # lines in the rendered meta.yaml on/off ('# ' comments them out).
        d = package_dicts.setdefault(
            package,
            {
                "packagename": package.lower(),
                "run_depends": "",
                "build_depends": "",
                "entry_points": "",
                "build_comment": "# ",
                "test_commands": "",
                "usemd5": "",
            },
        )
        d["import_tests"] = valid(package).lower()
        if d["import_tests"] == "":
            d["import_comment"] = "# "
        else:
            d["import_comment"] = ""
            d["import_tests"] = indent + d["import_tests"]

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI." % (version, package))
            d["version"] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d["version"] = versions[0]

        data = client.release_data(package, d["version"])
        urls = client.release_urls(package, d["version"])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url["python_version"] == "source"]
        if not urls:
            if "download_url" in data:
                urls = [defaultdict(str, {"url": data["download_url"]})]
                urls[0]["filename"] = urls[0]["url"].split("/")[-1]
                # No md5 is available for a bare download_url, so comment
                # the md5 line out of the recipe.
                d["usemd5"] = "#"
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url["url"], human_bytes(url["size"]), url["comment_text"]))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]["url"], urls[n]["size"], package))

        d["pypiurl"] = urls[n]["url"]
        d["md5"] = urls[n]["md5_digest"]
        d["filename"] = urls[n]["filename"]

        d["homeurl"] = data["home_page"]
        license_classifier = "License :: OSI Approved ::"
        # BUG FIX: str.lstrip(prefix) strips a *character set*, not a prefix,
        # and mangled names (e.g. "Apache Software License" -> "ache Software
        # License").  Slice the known prefix off instead.
        licenses = [
            classifier[len(license_classifier):].strip()
            for classifier in data["classifiers"]
            if classifier.startswith(license_classifier)
        ]
        if not licenses:
            if data["license"]:
                if args.noprompt:
                    license = data["license"]
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data["license"])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input("No license could be found for %s on PyPI. What license should I use? " % package)
        else:
            license = " or ".join(licenses)
        d["license"] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml

            print("Downloading %s (use --no-download to skip this step)" % package)
            tempdir = mkdtemp("conda_skeleton")

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d["filename"])
                if not isfile(download_path) or hashsum_file(download_path, "md5") != d["md5"]:
                    download(d["pypiurl"], join(SRC_CACHE, d["filename"]))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d["filename"]), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # TODO: Do this in a subprocess. That way would be safer (the
                # setup.py can't mess up this code), it will allow building
                # multiple recipes without a setuptools import messing
                # everyone up, and it would prevent passing __future__ imports
                # through.
                patch_distutils(tempdir)
                run_setuppy(src_dir)
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary objects; pkginfo.yaml is generated
                # locally by patch_distutils, but yaml.safe_load would be
                # the safer choice here.
                with open(join(tempdir, "pkginfo.yaml")) as fn:
                    pkginfo = yaml.load(fn)

                uses_distribute = "setuptools" in sys.modules

                if pkginfo["install_requires"] or uses_distribute:
                    deps = [remove_version_information(dep).lower() for dep in pkginfo["install_requires"]]
                    if "setuptools" in deps:
                        deps.remove("setuptools")
                        if "distribute" not in deps:
                            deps.append("distribute")
                            uses_distribute = False
                    d["build_depends"] = indent.join([""] + ["distribute"] * uses_distribute + deps)
                    ### Could be more discriminatory but enough
                    ### packages also need distribute at run_time...
                    d["run_depends"] = indent.join([""] + ["distribute"] * uses_distribute + deps)

                if pkginfo["entry_points"]:
                    if not isinstance(pkginfo["entry_points"], dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(pkginfo["entry_points"])
                    else:
                        entry_list = (
                            pkginfo["entry_points"].get("console_scripts", [])
                            # TODO: Use pythonw for these
                            + pkginfo["entry_points"].get("gui_scripts", [])
                        )
                        d["entry_points"] = indent.join([""] + entry_list)
                        d["build_comment"] = ""
                        d["test_commands"] = indent.join([""] + make_entry_tests(entry_list))
                if pkginfo["packages"]:
                    deps = set(pkginfo["packages"])
                    if d["import_tests"]:
                        deps = set([d["import_tests"]]) | deps
                    d["import_tests"] = indent.join([""] + list(deps))
                    d["import_comment"] = ""
            finally:
                rm_rf(tempdir)

    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), "meta.yaml"), "w") as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), "build.sh"), "w") as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), "bld.bat"), "w") as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 29
0
Archivo: pypi.py Proyecto: dmj111/conda
def main(args, parser):
    """Generate conda recipe skeletons for one or more PyPI packages
    (early Python-2 variant using raw_input).

    Queries the PyPI XML-RPC API for release metadata and writes
    ``meta.yaml``, ``build.sh`` and ``bld.bat`` into
    ``<output_dir>/<package>``.  Calls sys.exit() on missing versions
    or missing source URLs.
    """
    client = ServerProxy(args.pypi_url)
    package_dicts = {}
    for package in args.packages:
        d = package_dicts.setdefault(package, {'packagename': package})
        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                # BUG FIX: corrected "avalaiable" typo in the user-facing
                # error message.
                sys.exit("Error: Version %s of %s is not available on PyPI."
                    % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                sys.exit("Error: Could not find any versions of package %s" % package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" % package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[-1])
                print("Use --version to specify a different version.")
            d['version'] = versions[-1]

        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1:
            print("More than one source version is available for %s:" % package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                    human_bytes(url['size']), url['comment_text']))
            n = int(raw_input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'], package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        data = client.release_data(package, d['version'])
        d['homeurl'] = data['home_page']
        license_classifier = "License :: OSI Approved ::"
        # BUG FIX: str.lstrip(prefix) strips a *character set*, not a prefix,
        # and mangled names (e.g. "Apache Software License" -> "ache Software
        # License").  Slice the known prefix off instead.
        licenses = [classifier[len(license_classifier):].strip()
                    for classifier in data['classifiers']
                    if classifier.startswith(license_classifier)]
        if not licenses:
            if data['license']:
                # Some projects put the whole license text in this field
                print("This is the license for %s" % package)
                print()
                print(data['license'])
                print()
                license = raw_input("What license string should I use? ")
            else:
                license = raw_input("No license could be found for %s on PyPI. What license should I use? " % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

    for package in package_dicts:
        [output_dir] = args.output_dir
        d = package_dicts[package]
        makedirs(join(output_dir, package))
        print("Writing recipe for %s" % package)
        with open(join(output_dir, package, 'meta.yaml'),
            'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 30
0
def display_actions(actions, index):
    """Print a human-readable summary of a plan's actions.

    Shows the packages to be downloaded (with sizes and, optionally,
    channels), then groups link/unlink changes into NEW / REMOVED /
    UPDATED / DOWNGRADED sections with aligned, arrow-separated
    old --> new columns.

    actions: dict mapping action names (FETCH, LINK, UNLINK) to lists of
        dist strings / link args.
    index: repodata index mapping '<dist>.tar.bz2' to package info dicts.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += '  %s' % config.canonical_channel_name(
                                       info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Only print a total when there is more than one download to sum.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt =  split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt =  split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        # If the package is not in the index (e.g., an installed
        # package that is not in the index any more), we just have to fake the metadata.
        info = index.get(dist + '.tar.bz2', dict(name=pkg, version=ver,
            build_number=int(build) if build.isdigit() else 0, build=build, channel=None))
        Packages[dist] = Package(dist + '.tar.bz2', info)
        features[pkg][0] = info.get('features', '')



    # Column widths for the aligned output below; the `or [...]` fallbacks
    # keep max() from blowing up when a collection is empty.
    #             Put a minimum length here---.    .--For the :
    #                                         v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or
        [''], key=len))
    maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or
        [''], key=len))
    # No old version -> newly installed; no new version -> being removed.
    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number)
        except TypeError:
            # norm_version comparison can fail for non-comparable version
            # types; fall back to the raw version strings.
            newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number)
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' '*4

    def format(s, pkg):
        # Fill in the new-style format string built above with this
        # package's versions, channels and features.
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg+':', vers=packages[pkg],
            channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
    for pkg in sorted(new):
        print(format(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
    for pkg in sorted(removed):
        print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
    for pkg in sorted(updated):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
    for pkg in sorted(downgraded):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
Ejemplo n.º 31
0
def main(args, parser):
    """Generate conda recipe skeletons for one or more PyPI packages.

    Queries the PyPI XML-RPC API for versions, metadata and source URLs
    and, unless downloading is disabled, fetches and unpacks each sdist to
    recover dependencies and entry points from its setup.py.  Writes a
    meta.yaml, build.sh and bld.bat per package under output_dir.

    args   -- parsed command-line namespace (packages, version, download,
              pypi_url, output_dir, all_urls, noprompt, all_extras,
              recursive, ...).
    parser -- accepted for interface compatibility; unused here.
    """

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()

    # xmlrpc's stock transport ignores proxy settings; use the
    # requests-backed transport only when proxies are configured.
    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '  # YAML list-item separator used by all joined lists below

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is treated as a stack: --recursive and case-corrected
    # retries push more names onto it while we iterate.
    while args.packages:
        package = args.packages.pop()
        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitution dict; the *_comment entries toggle lines in
        # the generated recipe files by prefixing them with '#'.
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" %
                         package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" %
                      package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                # Fall back to the generic download_url; no md5 is available
                # for it, so comment out the md5 line in the recipe.
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0),
                                             package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        # repr() quotes/escapes the summary so it substitutes safely into
        # the YAML template.
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") %
                                    package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" %
                  package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # run_setuppy executes the sdist's setup.py with a patched
                # setup() and dumps what it learned into pkginfo.yaml.
                run_setuppy(src_dir, tempdir, args)
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary objects; pkginfo.yaml is generated
                # locally by run_setuppy, but yaml.safe_load would be safer.
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                #  gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            # NOTE(review): readfp is the pre-3.2 name of
                            # ConfigParser.read_file (deprecated alias).
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x] for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]"
                             % ','.join(extras))
                #... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)
                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        #  or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            #... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep: #... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s" % dep)
                                deps.append(spec)

                    # An explicit setuptools dep supersedes the implied ones.
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    # Second pass: render the recipe files for every collected package.
    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 32
0
def display_actions(actions, index):
    """Print a human-readable summary of a conda action plan.

    Lists packages to be downloaded (with sizes), then classifies every
    LINK/UNLINK pair into NEW / REMOVED / UPDATED / DOWNGRADED sections,
    column-aligned across name, version, channel and features.

    actions -- plan dict mapping FETCH/LINK/UNLINK to lists of dist specs.
    index   -- repodata index mapping '<dist>.tar.bz2' -> package info dict.
    """
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += '  %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Only print a total when more than one package is being fetched.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(
                sum(index[dist + '.tar.bz2']['size']
                    for dist in actions[FETCH])))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        # If the package is not in the index (e.g., an installed
        # package that is not in the index any more), we just have to fake the metadata.
        info = index.get(
            dist + '.tar.bz2',
            dict(name=pkg,
                 version=ver,
                 build_number=int(build) if build.isdigit() else 0,
                 build=build,
                 channel=None))
        Packages[dist] = Package(dist + '.tar.bz2', info)
        features[pkg][0] = info.get('features', '')

    # Column widths for the aligned output; the 'or [...]' fallbacks keep
    # max() happy when a category is empty.
    #             Put a minimum length here---.    .--For the :
    #                                         v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(
        max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewver = len(
        max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(
        max(features.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(
        max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldchannel = len(
        max([
            config.canonical_channel_name(
                Packages[pkg + '-' + packages[pkg][0]].channel)
            for pkg in packages if packages[pkg][0]
        ] or [''],
            key=len))
    maxnewchannel = len(
        max([
            config.canonical_channel_name(
                Packages[pkg + '-' + packages[pkg][1]].channel)
            for pkg in packages if packages[pkg][1]
        ] or [''],
            key=len))
    # No old version => newly installed; no new version => removed.
    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = (P0.name, P0.norm_version, P0.build_number) <= (
                P1.name, P1.norm_version, P1.build_number)
        except TypeError:
            # norm_version comparison can fail on odd version strings;
            # fall back to comparing the raw version fields.
            newer = (P0.name, P0.version,
                     P0.build_number) <= (P1.name, P1.version, P1.build_number)
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    def format(s, pkg):
        # Fill the prepared format string for one package row.
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(
                    Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg + ':',
                               vers=packages[pkg],
                               channel=channel,
                               features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
    for pkg in sorted(new):
        print(format(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
    for pkg in sorted(removed):
        print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
    for pkg in sorted(updated):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
    for pkg in sorted(downgraded):
        print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
Ejemplo n.º 33
0
def main(args, parser):
    """Generate conda recipe skeletons for one or more PyPI packages.

    Near-duplicate variant of the skeleton generator: queries the PyPI
    XML-RPC API for versions, metadata and source URLs and, unless
    downloading is disabled, fetches and unpacks each sdist to recover
    dependencies and entry points from its setup.py.  Writes a meta.yaml,
    build.sh and bld.bat per package under output_dir.

    args   -- parsed command-line namespace (packages, version, download,
              pypi_url, output_dir, all_urls, noprompt, all_extras,
              recursive, ...).
    parser -- accepted for interface compatibility; unused here.
    """

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()

    # xmlrpc's stock transport ignores proxy settings; use the
    # requests-backed transport only when proxies are configured.
    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '  # YAML list-item separator used by all joined lists below

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is treated as a stack: --recursive and case-corrected
    # retries push more names onto it while we iterate.
    while args.packages:
        package = args.packages.pop()
        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitution dict; the *_comment entries toggle lines in
        # the generated recipe files by prefixing them with '#'.
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" %
                         package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" %
                      package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                # Fall back to the generic download_url; no md5 is available
                # for it, so comment out the md5 line in the recipe.
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0),
                                             package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        # repr() quotes/escapes the summary so it substitutes safely into
        # the YAML template.
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") %
                                    package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" %
                  package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                # run_setuppy executes the sdist's setup.py with a patched
                # setup() and dumps what it learned into pkginfo.yaml.
                run_setuppy(src_dir, tempdir, args)
                # NOTE(review): this open() omits encoding= (the sibling
                # variant of this function passes encoding='utf-8') and
                # yaml.load lacks an explicit Loader — confirm intended.
                with open(join(tempdir, 'pkginfo.yaml')) as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                #  gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            # NOTE(review): readfp is the pre-3.2 name of
                            # ConfigParser.read_file (deprecated alias).
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x] for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]"
                             % ','.join(extras))
                #... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)
                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        #  or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            #... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep: #... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s" % dep)
                                deps.append(spec)

                    # An explicit setuptools dep supersedes the implied ones.
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    # Second pass: render the recipe files for every collected package.
    # NOTE(review): these open() calls rely on the locale default encoding;
    # the sibling variant passes encoding='utf-8' — confirm intended.
    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 34
0
def main(args, parser):
    """Entry point for the PyPI recipe skeleton generator.

    For every entry in ``args.packages`` (a PyPI package name, or a direct
    URL containing ``:``), resolve a version and source URL via the PyPI
    XML-RPC API, build a template dict ``d``, and finally write
    ``meta.yaml`` / ``build.sh`` / ``bld.bat`` skeletons into
    ``args.output_dir``.

    ``parser`` is the argparse parser for this sub-command; it is unused
    here but kept for the common sub-command signature.
    """
    # Route XML-RPC traffic through a requests-based transport only when
    # proxy servers are configured; otherwise use xmlrpc's default.
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    # args.output_dir is a single-element list; unpack it.
    [output_dir] = args.output_dir

    # Fetched once up front so misspelled-case package names can be
    # corrected against the full index below.
    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is used as a work stack: items may be re-pushed (e.g.
    # the case-corrected name below), so loop until it drains.
    while args.packages:
        [output_dir] = args.output_dir

        package = args.packages.pop()

        # Any argument containing ':' is treated as a direct source URL
        # rather than a PyPI package name.
        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path):
                raise RuntimeError("directory already exists: %s" % dir_path)
        # Template field defaults. A value of '# ' for the *_comment keys
        # comments the corresponding line out of the rendered recipe until
        # real content is filled in.
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'test_commands': '',
                'usemd5': '',
                'test_comment': '',
                'entry_comment': '# ',
                'egg_comment': '# ',
                'summary_comment': '',
                'home_comment': '',
            })
        if is_url:
            # NOTE(review): the write loop below reads d['packagename'];
            # presumably get_package_metadata() re-fills it for URL
            # packages — TODO confirm.
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            if args.version:
                # An explicit --version must exist on PyPI (including
                # hidden releases, hence the True flag).
                [version] = args.version
                versions = client.package_releases(package, True)
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                versions = client.package_releases(package)
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        print("%s not found, trying %s" % (package, package.capitalize()))
                        # Re-queue under the index's canonical casing and
                        # drop the dict created for the misspelled name.
                        args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                        del package_dicts[package]
                        continue
                    sys.exit("Error: Could not find any versions of package %s" %
                             package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[0])
                    print("Use --version to specify a different version.")
                # package_releases lists newest first; default to it.
                d['version'] = versions[0]

        # For direct URLs there is no PyPI metadata; the URL itself is the
        # single "release url" entry.
        data = client.release_data(package, d['version']) if not is_url else None
        urls = client.release_urls(package, d['version']) if not is_url else [package]
        if not is_url and not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            # No upload on PyPI itself; fall back to the metadata's
            # download_url, extracting an optional '#md5=...' fragment.
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                U = parse_url(urls[0]['url'])
                urls[0]['filename'] = U.path.rsplit('/')[-1]
                fragment = U.fragment or ''
                if fragment.startswith('md5='):
                    d['usemd5'] = ''
                    d['md5'] = fragment[len('md5='):]
                else:
                    # '#' comments the md5 line out of the recipe.
                    d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            # Interactive choice between multiple source distributions.
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        if not is_url:
            print("Using url %s (%s) for %s." % (urls[n]['url'],
                human_bytes(urls[n]['size'] or 0), package))
            d['pypiurl'] = urls[n]['url']
            d['md5'] = urls[n]['md5_digest']
            d['filename'] = urls[n]['filename']
        else:
            print("Using url %s" % package)
            d['pypiurl'] = package
            U = parse_url(package)
            # Honor an '#md5=...' fragment on the user-supplied URL.
            if U.fragment.startswith('md5='):
                d['usemd5'] = ''
                d['md5'] = U.fragment[len('md5='):]
            else:
                d['usemd5'] = '#'
                d['md5'] = ''
            # TODO: 'package' won't work with unpack()
            d['filename'] = U.path.rsplit('/', 1)[-1] or 'package'

        if is_url:
            # Real import name unknown until the source is inspected.
            d['import_tests'] = 'PLACEHOLDER'
        else:
            d['import_tests'] = valid(package).lower()

        # Mutates d in place (deps, entry points, summary, etc.).
        get_package_metadata(args, package, d, data)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        # If neither entry points nor import tests exist, comment out the
        # whole test section.
        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

    # All packages resolved; render the recipe files from the templates.
    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 35
0
Archivo: plan.py Proyecto: brentp/conda
def display_actions(actions, index, show_channel_urls=None):
    """Print a human-readable summary of a conda action plan.

    Parameters
    ----------
    actions : dict
        Plan mapping instruction keys (``inst.FETCH``, ``inst.LINK``,
        ``inst.UNLINK``) to lists of dist strings / link args.
    index : dict
        Repodata index mapping ``'<dist>.tar.bz2'`` to package records.
    show_channel_urls : bool or None
        Tri-state: True always shows channels, False never does, and None
        falls back to ``config.show_channel_urls`` (which may itself be
        None, meaning "show only non-default channels").
    """
    if show_channel_urls is None:
        show_channel_urls = config.show_channel_urls

    def channel_str(s):
        # Render a channel name according to the tri-state flag above.
        if s is None:
            return ''
        if show_channel_urls is None:
            return '' if s == 'defaults' else s
        return s if show_channel_urls else ''

    def channel_len(s):
        return len(channel_str(s))

    if actions.get(inst.FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[inst.FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            schannel = channel_str(info.get('schannel', '<unknown>'))
            if schannel:
                extra += '  ' + schannel
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[inst.FETCH]) > 1:
            num_bytes = sum(index[dist + '.tar.bz2']['size']
                            for dist in actions[inst.FETCH])
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(inst.LINK, []):
        dist, lt = inst.split_linkarg(arg)
        rec = index[dist + '.tar.bz2']
        pkg = rec['name']
        packages[pkg][1] = rec['version'] + '-' + rec['build']
        dist = pkg + '-' + packages[pkg][1]
        Packages[dist] = Package(dist + '.tar.bz2', rec)
        linktypes[pkg] = lt
        features[pkg][1] = rec.get('features', '')
    for arg in actions.get(inst.UNLINK, []):
        # The outgoing package's link type is irrelevant for display.
        dist, _ = inst.split_linkarg(arg)
        rec = index.get(dist + '.tar.bz2')
        if rec is None:
            # Not in the index (e.g. removed from repodata): reconstruct a
            # minimal record from the 'channel::name-version-build' string.
            pkg, ver, build = dist.split('::', 2)[-1].rsplit('-', 2)
            rec = dict(name=pkg, version=ver, build=build, channel=None,
                       schannel='<unknown>',
                       build_number=int(build) if build.isdigit() else 0)
        pkg = rec['name']
        packages[pkg][0] = rec['version'] + '-' + rec['build']
        dist = pkg + '-' + packages[pkg][0]
        Packages[dist] = Package(dist + '.tar.bz2', rec)
        features[pkg][0] = rec.get('features', '')

    #                     Put a minimum length here---.    .--For the :
    #                                                 v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])

    maxoldchannel = max([channel_len(Packages[p + '-' + packages[p][0]].schannel)
                         for p in packages if packages[p][0]] or [0])
    maxnewchannel = max([channel_len(Packages[p + '-' + packages[p][1]].schannel)
                         for p in packages if packages[p][1]] or [0])
    # A package with no old version is newly installed; no new version
    # means it is being removed; anything else is updated or downgraded.
    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if maxoldchannel:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if maxnewchannel:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = ((P0.name, P0.norm_version, P0.build_number) <=
                     (P1.name, P1.norm_version, P1.build_number))
        except TypeError:
            # norm_version may not be comparable; fall back to raw version.
            newer = ((P0.name, P0.version, P0.build_number) <=
                     (P1.name, P1.version, P1.build_number))
        if newer or str(P1.version) == 'custom':
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    # Renamed from 'format' to avoid shadowing the builtin of the same name.
    def fmt_line(s, pkg):
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = channel_str(Packages[pkg + '-' + packages[pkg][i]].schannel)
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
    for pkg in sorted(new):
        print(fmt_line(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
    for pkg in sorted(removed):
        print(fmt_line(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
    for pkg in sorted(updated):
        print(fmt_line(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
    for pkg in sorted(downgraded):
        print(fmt_line(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()