예제 #1
0
파일: fetch.py 프로젝트: 3kwa/conda
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    Download the package described by the index record `info` into
    `dst_dir` (defaulting to the first configured packages directory),
    verifying its md5 and, when present, its detached signature.
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    if not session:
        session = CondaSession()

    # Tarball name and its location on the channel.
    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True,
             ssl_verify=config.ssl_verify,
             proxy_servers=config.get_proxy_servers())

    sig = info.get('sig')
    if not sig:
        return

    # A '.' means the signature lives next to the package on the channel;
    # otherwise `sig` names the base URL hosting the .sig files.
    from conda.signature import verify, SignatureError

    fn2 = fn + '.sig'
    if sig == '.':
        sig_base = info['channel']
    else:
        sig_base = sig.rstrip('/') + '/'
    url = sig_base + fn2
    log.debug("signature url=%r" % url)
    download(url, join(dst_dir, fn2), session=session,
             ssl_verify=config.ssl_verify,
             proxy_servers=config.get_proxy_servers())
    try:
        if verify(path):
            return
    except SignatureError as e:
        sys.exit(str(e))
    sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
예제 #2
0
    def __init__(self, *args, **kwargs):
        """A requests Session preconfigured for conda: proxy servers,
        HTTP retries, extra URL schemes (file://, ftp://, s3://), the
        conda user agent, and the configured SSL-verification policy."""
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxy_map = get_proxy_servers()
        if proxy_map:
            self.proxies = proxy_map

        # Disable .netrc file lookups; this also disables the
        # REQUESTS_CA_BUNDLE and CURL_CA_BUNDLE env variables.
        self.trust_env = False

        # Configure retries for both HTTP schemes.
        if retries:
            retry_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            for scheme in ("http://", "https://"):
                self.mount(scheme, retry_adapter)

        # Register adapters for the non-HTTP schemes conda supports.
        self.mount("file://", LocalFSAdapter())
        self.mount("ftp://", FTPAdapter())
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = ssl_verify
예제 #3
0
    def __init__(self, *args, **kwargs):
        """Session subclass that applies conda's proxy and retry settings
        and registers adapters for file://, ftp:// and s3:// URLs."""
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        configured_proxies = get_proxy_servers()
        if configured_proxies:
            self.proxies = configured_proxies

        # Retry policy shared by plain and TLS HTTP.
        if retries:
            adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", adapter)
            self.mount("https://", adapter)

        # Handlers for the non-HTTP schemes conda understands.
        self.mount("file://", LocalFSAdapter())
        self.mount("ftp://", FTPAdapter())
        self.mount("s3://", S3Adapter())

        # Prefix the default user agent with the conda version.
        self.headers['User-Agent'] = "conda/%s %s" % (
            conda.__version__, self.headers['User-Agent'])
예제 #4
0
    def __init__(self, *args, **kwargs):
        """Create a requests Session carrying conda's network settings.

        Applies configured proxy servers, HTTP retry behaviour, handlers
        for file://, ftp:// and s3:// URLs, the conda user agent, and
        the configured SSL-verification policy.
        """
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        servers = get_proxy_servers()
        if servers:
            self.proxies = servers

        # Ignore the environment: no .netrc lookups, and no
        # REQUESTS_CA_BUNDLE / CURL_CA_BUNDLE env variables.
        self.trust_env = False

        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("https://", http_adapter)
            self.mount("http://", http_adapter)

        self.mount("file://", LocalFSAdapter())  # local filesystem urls
        self.mount("ftp://", FTPAdapter())       # ftp urls
        self.mount("s3://", S3Adapter())         # s3 urls

        self.headers['User-Agent'] = user_agent
        self.verify = ssl_verify
예제 #5
0
def get_xmlrpc_client(pypi_url):
    """Return an XML-RPC ServerProxy for `pypi_url`, routed through a
    requests-based transport when proxy servers are configured (the
    default xmlrpc transport ignores proxies)."""
    transport = RequestsTransport() if get_proxy_servers() else None
    return ServerProxy(pypi_url, transport=transport)
예제 #6
0
파일: pypi.py 프로젝트: scw/conda-build
def get_xmlrpc_client(pypi_url):
    """Build an XML-RPC client for `pypi_url`.

    When proxy servers are configured, a requests-based transport is
    used so the proxies are honoured; otherwise the default transport
    (transport=None) is kept.
    """
    if get_proxy_servers():
        return ServerProxy(pypi_url, transport=RequestsTransport())
    return ServerProxy(pypi_url, transport=None)
예제 #7
0
    def __init__(self, *args, **kwargs):
        """Session with conda's proxy configuration, retry policy, and
        support for file:// and ftp:// URLs."""
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        # Unconditionally take whatever proxy configuration conda has.
        self.proxies = get_proxy_servers()

        # Retry policy for both HTTP schemes.
        if retries:
            adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            for prefix in ("http://", "https://"):
                self.mount(prefix, adapter)

        # Extra URL schemes.
        self.mount("file://", LocalFSAdapter())
        self.mount("ftp://", FTPAdapter())
예제 #8
0
def main(args, parser):
    """Generate conda recipe skeletons for the PyPI packages in ``args.packages``.

    For each requested package this queries the PyPI XML-RPC API for
    release metadata and source URLs, optionally downloads and unpacks
    the sdist to extract dependencies and entry points from setup.py,
    and writes meta.yaml, build.sh and bld.bat into ``args.output_dir``.
    Exits via sys.exit() on unrecoverable errors; may prompt the user
    unless ``args.noprompt`` is set.
    """

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    # Route the XML-RPC client through a requests-based transport when
    # proxies are configured; the default transport ignores them.
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is a work queue: case-corrected retries and
    # --recursive dependency discovery push more names onto it.
    while args.packages:
        package = args.packages.pop()
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitutions for the recipe files; the *_comment
        # entries toggle template lines on ('') or off ('# '/'#').
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        # Resolve the version: either the one requested on the command
        # line, or the newest release PyPI reports.
        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" %
                         package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" %
                      package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        # Pick a source URL for this release, falling back to the
        # project's download_url (md5 unknown in that case).
        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'], urls[n]['size'],
                                             package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        # Prefer OSI-approved trove classifiers over the free-form
        # 'license' field, which often contains whole license texts.
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") %
                                    package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" %
                  package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                # NOTE(review): yaml.load without an explicit Loader; the
                # file is produced locally by run_setuppy, but confirm it
                # cannot contain untrusted content.
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                #  gui_scripts entry_points for conda and
                entry_points = pkginfo['entry_points']
                if entry_points:
                    # entry_points may arrive as INI-style text; parse it
                    # into the {section: [name=target, ...]} dict form.
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                if pkginfo['install_requires'] or setuptools_build or setuptools_run:
                    if isinstance(pkginfo['install_requires'], string_types):
                        pkginfo['install_requires'] = [pkginfo['install_requires']]
                    deps = []
                    for dep in pkginfo['install_requires']:
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)

                    # An explicit setuptools dependency supersedes the
                    # inferred build/run flags (bool * list trick below).
                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                # Always remove the temporary unpack directory.
                rm_rf(tempdir)

    # Render the collected substitutions into the recipe files.
    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
예제 #9
0
def main(args, parser):
    """Generate conda recipe skeletons for PyPI packages or direct URLs.

    Each entry in ``args.packages`` is either a PyPI package name or
    (when it contains ':') a direct download URL. PyPI names are
    resolved via the XML-RPC API; for both kinds the collected metadata
    is rendered into meta.yaml, build.sh and bld.bat under
    ``args.output_dir``.
    """
    # Route the XML-RPC client through a requests-based transport when
    # proxies are configured; the default transport ignores them.
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is a work queue: case-corrected retries re-append.
    while args.packages:
        [output_dir] = args.output_dir

        package = args.packages.pop()

        # A ':' marks a direct URL rather than a PyPI package name.
        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path):
                raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitutions; the *_comment entries toggle template
        # lines on ('') or off ('# ').
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'test_commands': '',
                'usemd5': '',
                'test_comment': '',
                'entry_comment': '# ',
                'egg_comment': '# ',
                'summary_comment': '',
                'home_comment': '',
            })
        if is_url:
            # NOTE(review): 'packagename' is deleted here but the final
            # write loop reads d['packagename'] — verify the URL path
            # restores it (e.g. in get_package_metadata) before then.
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            # Resolve the version: the one requested on the command
            # line, or the newest release PyPI reports.
            if args.version:
                [version] = args.version
                versions = client.package_releases(package, True)
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                versions = client.package_releases(package)
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        print("%s not found, trying %s" % (package, package.capitalize()))
                        args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                        del package_dicts[package]
                        continue
                    sys.exit("Error: Could not find any versions of package %s" %
                             package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[0])
                    print("Use --version to specify a different version.")
                d['version'] = versions[0]

        # For URLs the "url list" is just the URL itself; for PyPI names
        # query the release metadata and source URLs.
        data = client.release_data(package, d['version']) if not is_url else None
        urls = client.release_urls(package, d['version']) if not is_url else [package]
        if not is_url and not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                U = parse_url(urls[0]['url'])
                urls[0]['filename'] = U.path.rsplit('/')[-1]
                # An '#md5=...' URL fragment supplies the checksum.
                fragment = U.fragment or ''
                if fragment.startswith('md5='):
                    d['usemd5'] = ''
                    d['md5'] = fragment[len('md5='):]
                else:
                    d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        if not is_url:
            print("Using url %s (%s) for %s." % (urls[n]['url'],
                human_bytes(urls[n]['size'] or 0), package))
            d['pypiurl'] = urls[n]['url']
            d['md5'] = urls[n]['md5_digest']
            d['filename'] = urls[n]['filename']
        else:
            print("Using url %s" % package)
            d['pypiurl'] = package
            U = parse_url(package)
            # An '#md5=...' fragment on the URL supplies the checksum.
            if U.fragment.startswith('md5='):
                d['usemd5'] = ''
                d['md5'] = U.fragment[len('md5='):]
            else:
                d['usemd5'] = '#'
                d['md5'] = ''
            # TODO: 'package' won't work with unpack()
            d['filename'] = U.path.rsplit('/', 1)[-1] or 'package'

        if is_url:
            d['import_tests'] = 'PLACEHOLDER'
        else:
            d['import_tests'] = valid(package).lower()

        # Fill in the remaining metadata (deps, entry points, ...).
        get_package_metadata(args, package, d, data)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        # With neither entry points nor import tests, comment out the
        # whole test section.
        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

    # Render the collected substitutions into the recipe files.
    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
예제 #10
0
 def test_proxy_settings(self):
     """The condarc file should yield exactly the two expected proxy entries."""
     config.rc = config.load_condarc(config.rc_path)
     proxy_map = config.get_proxy_servers()
     self.assertEqual(len(proxy_map), 2)
     self.assertEqual(proxy_map['http'], 'http://*****:*****@corp.com:8080')
     self.assertEqual(proxy_map['https'], 'https://*****:*****@corp.com:8080')
예제 #11
0
def main(args, parser):
    """Generate conda recipe skeletons for PyPI packages or direct URLs.

    Each entry in ``args.packages`` is a PyPI package name or (when it
    contains ':') a direct download URL. Supports --version-compare and
    records every processed name in ``args.created_recipes``. Writes
    meta.yaml, build.sh and bld.bat under ``args.output_dir``.
    """
    # Route the XML-RPC client through a requests-based transport when
    # proxies are configured; the default transport ignores them.
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # args.packages is a work queue: case-corrected retries re-append.
    args.created_recipes = []
    while args.packages:
        [output_dir] = args.output_dir

        package = args.packages.pop()
        args.created_recipes.append(package)

        # A ':' marks a direct URL rather than a PyPI package name.
        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not args.version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitutions; the *_comment entries toggle template
        # lines on ('') or off ('# '/'#').
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'noarch_python_comment': '# ',
                'test_commands': '',
                'requires_comment': '#',
                'tests_require': '',
                'usemd5': '',
                'test_comment': '',
                'entry_comment': '# ',
                'egg_comment': '# ',
                'summary_comment': '',
                'home_comment': '',
            })
        if is_url:
            # NOTE(review): 'packagename' is deleted here but the final
            # write loop reads d['packagename'] — verify the URL path
            # restores it (e.g. in get_package_metadata) before then.
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            # Resolve the version: the one requested on the command
            # line, or the newest release PyPI reports.
            versions = client.package_releases(package, True)
            if args.version_compare:
                version_compare(args, package, versions)
            if args.version:
                [version] = args.version
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        cased_package = all_packages[all_packages_lower.index(package.lower())]
                        if cased_package != package:
                            print("%s not found, trying %s" % (package, cased_package))
                            args.packages.append(cased_package)
                            del package_dicts[package]
                            continue
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[0])
                    print("Use --version to specify a different version.")
                d['version'] = versions[0]

        # Source-URL resolution is delegated to a helper here.
        data, d['pypiurl'], d['filename'], d['md5'] = get_download_data(args,
                                                                        client,
                                                                        package,
                                                                        d['version'],
                                                                        is_url)

        # Comment out the md5 template line when no checksum is known.
        if d['md5'] == '':
            d['usemd5'] = '# '
        else:
            d['usemd5'] = ''

        d['import_tests'] = ''

        # Fill in the remaining metadata (deps, entry points, ...).
        get_package_metadata(args, package, d, data)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        if d['tests_require'] == '':
            d['requires_comment'] = '# '
        else:
            d['requires_comment'] = ''
            d['tests_require'] = INDENT + d['tests_require']

        # With neither entry points nor import tests, comment out the
        # whole test section.
        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

    # Render the collected substitutions into the recipe files.
    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
예제 #12
0
# Module-level logger for this file.
log = getLogger(__name__)

# Proxy-handling plan:
# 1. get proxies if needed -- one proxy per protocol
# 2. handle authentication
#    basic, digest, and NTLM (windows) authentications should be handled
# 3. handle any protocol -- typically http, https, ftp

# 1. get the proxies list.
# urllib can only detect system proxies on windows and mac, so on linux (or
# whenever the user wants to specify the proxy explicitly) the condarc
# setting takes precedence and the system proxies are only a fallback.
# The result maps protocol to proxy server, e.g. {'http': 'http://proxy:8080'}.
proxies_dict = get_proxy_servers() or urllib2.getproxies()

# 2. handle authentication: password manager shared by the proxy handlers.
# (urllib2 is the Python 2 module; on Python 3 this would be urllib.request.)
proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

def get_userandpass(proxytype='', realm=''):
    """Prompt on the terminal for proxy credentials.

    Returns a (username, password) tuple; can be swapped out for any
    other credential source, such as a GUI dialog.
    """
    import getpass

    username = input(proxytype + ' proxy username:')
    password = getpass.getpass()
    return username, password
예제 #13
0
# Module-level logger for this file.
log = getLogger(__name__)

# Proxy-handling plan:
# 1. get proxies if needed -- a proxy for each protocol
# 2. handle authentication
#    basic, digest, and NTLM (windows) authentications should be handled
# 3. handle any protocol -- typically http, https, ftp

# 1. get the proxies list.
# urllib can only detect system proxies on windows and mac, so on linux (or
# whenever the user wants to specify the proxy explicitly) the condarc
# setting takes precedence and the system proxies are only a fallback.
# The proxies are in a dict {'http': 'http://proxy:8080'}, protocol:proxyserver.
proxies_dict = get_proxy_servers() or urllib2.getproxies()

# 2. handle authentication: password manager shared by the proxy handlers.
# (urllib2 is the Python 2 module; on Python 3 this would be urllib.request.)
proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()


def get_userandpass(proxytype='', realm=''):
    """Read a proxy username and password from the terminal.

    Any callable with this signature (e.g. a GUI prompt) could stand
    in for it.
    """
    import getpass

    user = input(proxytype + ' proxy username:')
    return user, getpass.getpass()
예제 #14
0
파일: test_config.py 프로젝트: delicb/conda
 def test_proxy_settings(self):
     """Reloading the config file should expose both configured proxies."""
     # reload the config file
     config.rc = config.load_condarc(config.rc_path)
     expected = {'http': 'http://*****:*****@corp.com:8080',
                 'https': 'https://*****:*****@corp.com:8080'}
     self.assertEqual(config.get_proxy_servers(), expected)
예제 #15
0
# Proxy-handling plan:
# 1. get proxies if needed -- a proxy for each protocol
# 2. handle authentication
#    basic, digest, and NTLM (windows) authentications should be handled
# 3. handle any protocol -- typically http, https, ftp

# 1. get the proxies list.
# urllib can only detect system proxies on windows and mac, so explicit
# condarc settings (get_proxy_servers) take precedence and the system
# proxies are only a fallback. Previously get_proxy_servers() was called
# twice (once to test, once to assign); the single-call `or` fallback
# below is the same idiom the sibling modules use.
# The result maps protocol to proxy server, e.g. {'http': 'http://proxy:8080'}.
proxies_dict = get_proxy_servers() or urllib2.getproxies()

# 2. handle authentication: password manager shared by the proxy handlers.
proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()


def get_userandpass(proxytype='', realm=''):
    """Read proxy credentials from the terminal.

    Prompts for a username (echoed) and a password (not echoed) and
    returns them as a ``(username, password)`` tuple.  Can be swapped
    out for any callable with the same signature, such as a GUI prompt.
    """
    import getpass

    user = input(proxytype + ' proxy username:')
    return user, getpass.getpass()
예제 #16
0
파일: fetch.py 프로젝트: 3kwa/conda
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Download and cache the repodata index for the channel at `url`.

    Loads any previously cached repodata for `url`, then issues a
    conditional GET for ``repodata.json.bz2`` using the cached
    Etag / Last-Modified headers.  On a 304 response the cached copy is
    reused; otherwise the response is decompressed, parsed, and written
    back to the cache file.

    Returns the repodata dict (or None for a missing/forbidden
    ``/noarch/`` directory).  Raises RuntimeError for invalid index
    data or unrecoverable HTTP errors.  407 responses trigger proxy
    authentication via handle_proxy_407() followed by a retry.
    """
    if not config.ssl_verify:
        # SSL verification is off on purpose; silence urllib3's warning spam.
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession(ssl_verify=config.ssl_verify,
                                      proxy_servers=config.get_proxy_servers())

    # Load the existing cache entry, if any; fall back to an empty index.
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    # Conditional-request headers so an unchanged index yields a cheap 304.
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407: # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find anaconda.org user %s' %
                   config.remove_binstar_tokens(url).split(
                        config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                if url.endswith('/noarch/'): # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and config.rc.get('channel_alias',
                        config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                "'anaconda login' to access private packages(%s, %s)" %
                (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry without the (possibly stale) binstar token in the URL.
            return fetch_repodata(config.remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        # Logged but not raised: execution falls through and the stale cache
        # (if any) is returned below.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407.  See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e): # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
        log.debug(msg)
        # NOTE(review): fail_unknown_host appears to be a module-level flag
        # defined outside this view — confirm where it is set.
        if fail_unknown_host:
            raise RuntimeError(msg)

    # Persist the (possibly refreshed) index; best-effort, ignore disk errors.
    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
예제 #17
0
 def test_proxy_settings(self):
     """Both http and https proxy settings are exposed by the config."""
     expected = {
         'http': 'http://*****:*****@corp.com:8080',
         'https': 'https://*****:*****@corp.com:8080',
     }
     self.assertEqual(config.get_proxy_servers(), expected)
예제 #18
0
def main(args, parser):
    """Generate conda recipe skeletons for the PyPI packages in ``args``.

    For each requested package this queries the PyPI XML-RPC interface
    for available versions, source URLs and metadata; optionally
    downloads and unpacks the sdist to extract dependencies and entry
    points from setup.py; and finally writes meta.yaml / build.sh /
    bld.bat for each package under ``args.output_dir``.  Exits via
    sys.exit() on fatal errors (unknown package or version, unparsable
    dependency specs, invalid extras).
    """

    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")

    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n    - '

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # Work-list loop: --recursive mode appends dependencies back onto
    # args.packages below, so this is not a simple fixed iteration.
    while args.packages:
        package = args.packages.pop()
        # Look for package[extra,...] features spec:
        match_extras = re.match(r'^([^[]+)\[([^]]+)\]$', package)
        if match_extras:
            package, extras = match_extras.groups()
            extras = extras.split(',')
        else:
            extras = []
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template fields consumed by PYPI_META/PYPI_BUILD_SH/PYPI_BLD_BAT;
        # the *_comment fields toggle lines in the templates on/off.
        d = package_dicts.setdefault(package, {'packagename': package.lower(),
                                               'run_depends': '',
                                               'build_depends': '',
                                               'entry_points': '',
                                               'build_comment': '# ',
                                               'test_commands': '',
                                               'usemd5': '',
                                               'entry_comment': '#',
                                               'egg_comment': '#'})
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" % (package, package.capitalize()))
                    args.packages.append(all_packages[all_packages_lower.index(package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" %
                         package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" %
                      package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." % (urls[n]['url'],
                                             human_bytes(urls[n]['size'] or 0),
                                             package))

        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [classifier.split(license_classifier, 1)[1] for classifier in
                    data['classifiers'] if classifier.startswith(license_classifier)]
        else:
            licenses = []
        if not licenses:
            # NOTE: 'license' shadows the builtin of the same name here;
            # the shadowing is local to this function.
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(("No license could be found for %s on " +
                                     "PyPI. What license should I use? ") %
                                    package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute).  Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" %
                  package)
            tempdir = mkdtemp('conda_skeleton_' + package)

            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)

            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(download_path,
                                                             'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary objects; pkginfo.yaml is generated by
                # run_setuppy, so this is presumably trusted — confirm.
                with open(join(tempdir, 'pkginfo.yaml'), encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False

                # Look at the entry_points and construct console_script and
                #  gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            setuptools_run = True
                            for section in config.sections():
                                if section in ['console_scripts', 'gui_scripts']:
                                    value = ['%s=%s' % (option, config.get(section, option))
                                             for option in config.options(section)]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print("WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join([''] + make_entry_tests(entry_list))

                # Extract requested extra feature requirements...
                if args.all_extras:
                    extras_require = list(pkginfo['extras_require'].values())
                else:
                    try:
                        extras_require = [pkginfo['extras_require'][x] for x in extras]
                    except KeyError:
                        sys.exit("Error: Invalid extra features: [%s]"
                             % ','.join(extras))
                #... and collect all needed requirement specs in a single list:
                requires = []
                for specs in [pkginfo['install_requires']] + extras_require:
                    if isinstance(specs, string_types):
                        requires.append(specs)
                    else:
                        requires.extend(specs)
                if requires or setuptools_build or setuptools_run:
                    deps = []
                    for deptext in requires:
                        # Every item may be a single requirement
                        #  or a multiline requirements string...
                        for dep in deptext.split('\n'):
                            #... and may also contain comments...
                            dep = dep.split('#')[0].strip()
                            if dep: #... and empty (or comment only) lines
                                spec = spec_from_line(dep)
                                if spec is None:
                                    sys.exit("Error: Could not parse: %s" % dep)
                                deps.append(spec)

                    if 'setuptools' in deps:
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    d['build_depends'] = indent.join([''] +
                                                     ['setuptools'] * setuptools_build +
                                                     deps)
                    d['run_depends'] = indent.join([''] +
                                                   ['setuptools'] * setuptools_run +
                                                   deps)

                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [x for x in d['import_tests'].split()
                                   if x != '-']
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    # Emit the recipe files for every package whose metadata was gathered.
    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
예제 #19
0
파일: test_config.py 프로젝트: Zibi92/conda
 def test_proxy_settings(self):
     """The configured http/https proxies are returned as a dict."""
     proxies = config.get_proxy_servers()
     self.assertEqual(proxies,
                      {'http': 'http://*****:*****@corp.com:8080',
                       'https': 'https://*****:*****@corp.com:8080'})
예제 #20
0
def main(args, parser):
    """Generate conda recipes for PyPI package names or sdist URLs.

    For each entry in ``args.packages`` (a bare package name, or a URL —
    detected by the presence of ':'), queries the PyPI XML-RPC interface
    for versions and download data, fills in the template fields via
    get_package_metadata(), and writes meta.yaml / build.sh / bld.bat
    under ``args.output_dir``.  Exits via sys.exit() for unknown
    packages or unavailable versions.
    """
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # Work-list loop: a wrongly-cased package name may be re-queued below.
    args.created_recipes = []
    while args.packages:
        [output_dir] = args.output_dir

        package = args.packages.pop()
        args.created_recipes.append(package)

        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not args.version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        # Template fields; the *_comment entries toggle template lines on/off.
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'noarch_python_comment': '# ',
                'test_commands': '',
                'requires_comment': '#',
                'tests_require': '',
                'usemd5': '',
                'test_comment': '',
                'entry_comment': '# ',
                'egg_comment': '# ',
                'summary_comment': '',
                'home_comment': '',
            })
        if is_url:
            # NOTE(review): 'packagename' is removed for URL specs but read
            # again in the output loop below; presumably
            # get_package_metadata() restores it for URLs — confirm.
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            versions = client.package_releases(package, True)
            if args.version_compare:
                version_compare(args, package, versions)
            if args.version:
                [version] = args.version
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        cased_package = all_packages[all_packages_lower.index(package.lower())]
                        if cased_package != package:
                            print("%s not found, trying %s" % (package, cased_package))
                            args.packages.append(cased_package)
                            del package_dicts[package]
                            continue
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[0])
                    print("Use --version to specify a different version.")
                d['version'] = versions[0]

        data, d['pypiurl'], d['filename'], d['md5'] = get_download_data(args,
                                                                        client,
                                                                        package,
                                                                        d['version'],
                                                                        is_url)

        # Without an md5 the checksum line in meta.yaml is commented out.
        if d['md5'] == '':
            d['usemd5'] = '# '
        else:
            d['usemd5'] = ''

        d['import_tests'] = ''

        # Fills in the remaining fields of d (license, summary, deps, ...).
        get_package_metadata(args, package, d, data)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        if d['tests_require'] == '':
            d['requires_comment'] = '# '
        else:
            d['requires_comment'] = ''
            d['tests_require'] = INDENT + d['tests_require']

        # No entry points and no import tests means no test section at all.
        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

    # Emit the recipe files for every package whose metadata was gathered.
    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")