Example #1
def get_pkginfo(package, filename, pypiurl, digest, python_version,
                extra_specs, config, setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    hash_type = digest[0]
    hash_value = digest[1]
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, hash_type) != hash_value:
            download(pypiurl, join(config.src_cache, filename))
            if hashsum_file(download_path, hash_type) != hash_value:
                raise RuntimeError(
                    'Download of {} failed: the {} checksum did not match the'
                    ' expected value {}. Please try again.'.format(
                        package, hash_type, hash_value))
        else:
            print("Using cached download")
        # Calculate the preferred hash type here if necessary.
        # Needs to be done in this block because this is where we have
        # access to the source file.
        if hash_type != 'sha256':
            new_hash_value = hashsum_file(download_path, 'sha256')
        else:
            new_hash_value = ''

        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir,
                    tempdir,
                    python_version,
                    extra_specs=extra_specs,
                    config=config,
                    setup_options=setup_options)
        try:
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                pkg_info = yaml.safe_load(fn)
        except IOError:
            pkg_info = pkginfo.SDist(download_path).__dict__
        if new_hash_value:
            pkg_info['new_hash_value'] = ('sha256', new_hash_value)
    finally:
        rm_rf(tempdir)

    return pkg_info
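
A minimal, self-contained sketch of the cache-and-verify step used above, with hashlib standing in for hashsum_file and urllib.request standing in for conda-build's download helper; fetch_to_cache and sha256_of are illustrative names, not part of conda-build's API.

import hashlib
import os
import urllib.request

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so large sdists do not have to fit in memory.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()

def fetch_to_cache(url, cache_dir, filename, expected_sha256):
    # Reuse the cached copy only while its checksum still matches; otherwise
    # re-download and verify, mirroring the logic in the function above.
    os.makedirs(cache_dir, exist_ok=True)
    path = os.path.join(cache_dir, filename)
    if not os.path.isfile(path) or sha256_of(path) != expected_sha256:
        urllib.request.urlretrieve(url, path)
        if sha256_of(path) != expected_sha256:
            raise RuntimeError('Download of %s failed the sha256 check. Please try again.' % filename)
    else:
        print('Using cached download')
    return path
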
Example #2
def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, config,
                setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    hash_type = digest[0]
    hash_value = digest[1]
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, hash_type) != hash_value:
            download(pypiurl, join(config.src_cache, filename))
            if hashsum_file(download_path, hash_type) != hash_value:
                raise RuntimeError('Download of {} failed: the {} checksum did not'
                                   ' match the expected value {}. Please'
                                   ' try again.'.format(package, hash_type, hash_value))
        else:
            print("Using cached download")
        # Calculate the preferred hash type here if necessary.
        # Needs to be done in this block because this is where we have
        # access to the source file.
        if hash_type != POSSIBLE_DIGESTS[0]:
            new_hash_value = hashsum_file(download_path, POSSIBLE_DIGESTS[0])
        else:
            new_hash_value = ''

        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config,
                    setup_options=setup_options)
        try:
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                pkg_info = yaml.safe_load(fn)
        except IOError:
            pkg_info = pkginfo.SDist(download_path).__dict__
        if new_hash_value:
            pkg_info['new_hash_value'] = (POSSIBLE_DIGESTS[0], new_hash_value)
    finally:
        rm_rf(tempdir)

    return pkg_info
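
Example #2 differs from Example #1 mainly in that the preferred digest name comes from a POSSIBLE_DIGESTS constant rather than a hard-coded 'sha256'. A hedged sketch of that pattern, assuming a tuple ordered with the preferred algorithm first; the constant and helper shown here are assumptions for illustration, not necessarily conda-build's definitions.

import hashlib

# Assumed ordering: the first entry is the digest worth recording.
POSSIBLE_DIGESTS = ('sha256', 'md5')

def upgrade_digest(path, supplied_type):
    # Recompute only when the caller supplied something other than the
    # preferred algorithm, mirroring the branch in the function above.
    if supplied_type == POSSIBLE_DIGESTS[0]:
        return ''
    h = hashlib.new(POSSIBLE_DIGESTS[0])
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    return h.hexdigest()
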
Example #3
def get_pkginfo(package, filename, pypiurl, md5, python_version, config, setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(config.src_cache, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version, config=config, setup_options=setup_options)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.safe_load(fn)
    finally:
        rm_rf(tempdir)

    return pkginfo
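
The pkginfo.yaml written by run_setuppy is read back with yaml.safe_load. A short illustration of why safe_load is the right call for data coming from an arbitrary sdist; the YAML document below is made up for the example, not conda-build's actual file layout.

import yaml

# Made-up stand-in for a pkginfo.yaml produced from an sdist.
pkginfo_text = """
name: example-package
version: 1.2.3
entry_points: []
"""

# safe_load only builds plain Python types (dicts, lists, strings, numbers),
# whereas yaml.load with the full loader can instantiate arbitrary Python
# objects named in the document, which is undesirable for untrusted input.
pkg_info = yaml.safe_load(pkginfo_text)
print(pkg_info['name'], pkg_info['version'])
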
Example #4
def test_source_user_expand(testing_workdir):
    with TemporaryDirectory(dir=os.path.expanduser('~')) as tmp:
        with TemporaryDirectory() as tbz_srcdir:
            file_txt = os.path.join(tbz_srcdir, "file.txt")
            with open(file_txt, 'w') as f:
                f.write("hello")
            tbz_name = os.path.join(tmp, "cb-test.tar.bz2")
            with tarfile.open(tbz_name, "w:bz2") as tar:
                tar.add(tbz_srcdir, arcname=os.path.sep)
            for prefix in ('~', 'file:///~'):
                source_dict = {
                    "url":
                    os.path.join(prefix, os.path.basename(tmp),
                                 "cb-test.tar.bz2"),
                    "sha256":
                    hashsum_file(tbz_name, 'sha256')
                }
                with TemporaryDirectory() as tmp2:
                    download_to_cache(tmp2, '', source_dict)
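
Example #4 checks that a source url beginning with '~' or 'file:///~' is expanded to the user's home directory before the tarball is fetched and its sha256 verified. A standard-library sketch of that expansion; expand_user_url is a hypothetical helper, not the function conda-build uses internally.

import os

def expand_user_url(url):
    # Strip the file:// scheme so the '~' sits at the start of a path, then
    # expand it; '~/...' and 'file:///~/...' resolve to the same location.
    if url.startswith('file:///~'):
        url = url[len('file:///'):]
    return os.path.expanduser(url)

for prefix in ('~', 'file:///~'):
    print(expand_user_url(prefix + '/cb-test.tar.bz2'))
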