Esempio n. 1
0
def unpack_http_url(link, location, download_dir=None, session=None):
    """Fetch the archive behind ``link`` over HTTP and unpack it into
    ``location``.

    If ``download_dir`` is given and already holds the file, that copy is
    reused; otherwise the file is downloaded into a temporary directory.
    When ``download_dir`` is given but did not hold the file, a copy of
    the archive is left there after unpacking.

    :raises TypeError: if ``session`` is not supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    try:
        # If a download dir is specified, is the file already downloaded
        # there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link, download_dir)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link, session,
                                                         temp_dir)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse
        # dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, content_type, link)

        if not already_downloaded_path:
            os.unlink(from_path)
    finally:
        # Clean up even when download/unpack raised, so the temp dir is not
        # leaked. rmtree (unlike os.rmdir) also copes with a leftover
        # partial download; ignore_errors keeps cleanup failures from
        # masking the original exception.
        import shutil
        shutil.rmtree(temp_dir, ignore_errors=True)
Esempio n. 2
0
def unpack_http_url(link,
                    location,
                    download_dir=None,
                    session=None,
                    hashes=None,
                    progress_bar="on"):
    """Fetch the file behind ``link`` and unpack it into ``location``.

    A valid copy already present in ``download_dir`` is reused; otherwise
    the file is downloaded into a temporary directory and, when
    ``download_dir`` is set, a copy of the archive is left there.

    :raises TypeError: if ``session`` is not supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'")

    with TempDirectory(kind="unpack") as temp_dir:
        # Look for a valid, previously downloaded copy first.
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(
                link, download_dir, hashes)

        if not already_downloaded_path:
            # Nothing cached: download into the temporary directory.
            from_path, content_type = _download_http_url(
                link, session, temp_dir.path, hashes, progress_bar)
        else:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]

        # Even download-only runs unpack the archive, since dependency
        # information is parsed from the unpacked tree.
        unpack_file(from_path, location, content_type, link)

        # Cache the archive in download_dir unless it already came from
        # there.
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)
Esempio n. 3
0
def unpack_http_url(link, location, download_dir=None, session=None):
    """Fetch the archive behind ``link`` over HTTP and unpack it into
    ``location``.

    Reuses a copy already present in ``download_dir`` when possible;
    otherwise downloads to a temporary directory. When ``download_dir``
    is set but did not hold the file, a copy of the archive is left there.

    :raises TypeError: if ``session`` is not supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'")

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    try:
        # If a download dir is specified, is the file already downloaded
        # there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link, download_dir)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link, session,
                                                         temp_dir)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse
        # dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, content_type, link)

        if not already_downloaded_path:
            os.unlink(from_path)
    finally:
        # Remove the temp dir even when download/unpack raised, so a
        # failed install does not leak a 'pip-*-unpack' directory.
        rmtree(temp_dir)
Esempio n. 4
0
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """
    # NOTE(review): despite the docstring, the implementation builds an
    # sdist from ``link_path`` and unpacks it into ``location``.
    # Background: https://github.com/pypa/pip/issues/2195

    # Start from a clean target directory.
    if os.path.isdir(location):
        rmtree(location)

    # Build an sdist via the setuptools shim:
    # ``<python> -c <SETUPTOOLS_SHIM % 'setup.py'> sdist --dist-dir <loc>``
    setup_py = 'setup.py'
    sdist_args = [
        sys.executable,
        '-c',
        SETUPTOOLS_SHIM % setup_py,
        'sdist',
        '--dist-dir',
        location,
    ]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # The dist dir was freshly created above, so its first entry is the
    # sdist we just built; unpack it in place.
    built_name = os.listdir(location)[0]
    sdist = os.path.join(location, built_name)
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
Esempio n. 5
0
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # NOTE(review): the code below builds an sdist with setup.py and unpacks
    # it into `location`; no shutil.copytree is involved despite the old
    # slowness note. See https://github.com/pypa/pip/issues/2195

    # Remove any stale contents so the sdist is built into a clean dir.
    if os.path.isdir(location):
        rmtree(location)

    # build an sdist via the setuptools shim:
    # ``<python> -c <SETUPTOOLS_SHIM % 'setup.py'> sdist --dist-dir <loc>``
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`; assumes the freshly built sdist is the
    # only entry in the dist dir -- TODO confirm
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
Esempio n. 6
0
def test_wheel_version(tmpdir, data):
    """wheel_version reads a future Wheel-Version and rejects a broken wheel."""
    expected_future = (1, 9)

    # Unpack both fixture wheels into sibling directories.
    for wheel_name, suffix in (
        ("futurewheel-1.9-py2.py3-none-any.whl", "future"),
        ("brokenwheel-1.0-py2.py3-none-any.whl", "broken"),
    ):
        unpack_file(data.packages.join(wheel_name), tmpdir + suffix, None, None)

    assert wheel.wheel_version(tmpdir + "future") == expected_future
    assert not wheel.wheel_version(tmpdir + "broken")
Esempio n. 7
0
def test_wheel_version(tmpdir, data):
    """wheel_version parses a future Wheel-Version and returns a falsy
    value for a wheel with broken metadata."""
    fixtures = {
        'future': 'futurewheel-1.9-py2.py3-none-any.whl',
        'broken': 'brokenwheel-1.0-py2.py3-none-any.whl',
    }
    for suffix, wheel_name in fixtures.items():
        unpack_file(data.packages.join(wheel_name), tmpdir + suffix, None,
                    None)

    assert wheel.wheel_version(tmpdir + 'future') == (1, 9)
    assert not wheel.wheel_version(tmpdir + 'broken')
Esempio n. 8
0
def test_wheel_version(tmpdir, data):
    """wheel_version handles both a future and a broken wheel."""
    future_version = (1, 9)

    def unpack_named(wheel_name, suffix):
        # Extract the fixture wheel into its own subdirectory of tmpdir.
        unpack_file(data.packages.join(wheel_name),
                    tmpdir + suffix, None, None)

    unpack_named('futurewheel-1.9-py2.py3-none-any.whl', 'future')
    unpack_named('brokenwheel-1.0-py2.py3-none-any.whl', 'broken')

    assert wheel.wheel_version(tmpdir + 'future') == future_version
    assert not wheel.wheel_version(tmpdir + 'broken')
Esempio n. 9
0
def unpack_file_url(link, location, download_dir=None, hashes=None,
                    toto_verify=None, toto_default=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.

    After unpacking, the resulting tree is passed to
    ``in_toto_verify_wrapper`` with the given ``toto_verify`` /
    ``toto_default`` settings.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory, copy the tree verbatim.
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        in_toto_verify_wrapper(location, toto_verify=toto_verify,
                               toto_default=toto_default)
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # in-toto verification of the unpacked tree
    in_toto_verify_wrapper(location, toto_verify=toto_verify,
                           toto_default=toto_default)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
Esempio n. 10
0
 def prep(self, data, tmpdir):
     """Unpack the sample wheel into ``src`` and set up the install scheme."""
     self.name = "sample"
     self.wheelpath = data.packages.join("sample-1.2.0-py2.py3-none-any.whl")
     self.req = Requirement("sample")
     self.src = os.path.join(tmpdir, "src")
     self.dest = os.path.join(tmpdir, "dest")
     unpack_file(self.wheelpath, self.src, None, None)
     # Map scheme keys to their subdirectories under ``dest``.
     self.scheme = {
         key: os.path.join(self.dest, subdir)
         for key, subdir in (
             ("scripts", "bin"),
             ("purelib", "lib"),
             ("data", "data"),
         )
     }
     self.src_dist_info = os.path.join(self.src, "sample-1.2.0.dist-info")
     self.dest_dist_info = os.path.join(self.scheme["purelib"],
                                        "sample-1.2.0.dist-info")
Esempio n. 11
0
    def test_unpack_wheel_no_flatten(self):
        """unpack_file on a wheel must keep the .dist-info dir at top level."""
        from pip import utils
        from tempfile import mkdtemp
        from shutil import rmtree

        filepath = '../data/packages/meta-1.0-py2.py3-none-any.whl'
        if not os.path.exists(filepath):
            pytest.skip("%s does not exist" % filepath)
        # Create the temp dir *before* entering the try block: if mkdtemp
        # itself fails, the finally clause would otherwise raise
        # UnboundLocalError on ``tmpdir`` and mask the real error.
        tmpdir = mkdtemp()
        try:
            utils.unpack_file(filepath, tmpdir, 'application/zip', None)
            assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
        finally:
            rmtree(tmpdir)
Esempio n. 12
0
    def test_unpack_wheel_no_flatten(self):
        """unpack_file on a wheel must keep the .dist-info dir at top level."""
        from pip import utils
        from tempfile import mkdtemp
        from shutil import rmtree

        filepath = '../data/packages/meta-1.0-py2.py3-none-any.whl'
        if not os.path.exists(filepath):
            pytest.skip("%s does not exist" % filepath)
        # Create the temp dir *before* entering the try block: if mkdtemp
        # itself fails, the finally clause would otherwise raise
        # UnboundLocalError on ``tmpdir`` and mask the real error.
        tmpdir = mkdtemp()
        try:
            utils.unpack_file(filepath, tmpdir, 'application/zip', None)
            assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
        finally:
            rmtree(tmpdir)
Esempio n. 13
0
 def prep(self, data, tmpdir):
     """Prepare the sample wheel plus src/dest layout used by the tests."""
     self.name = 'sample'
     self.wheelpath = data.packages.join(
         'sample-1.2.0-py2.py3-none-any.whl')
     self.req = Requirement('sample')
     self.src = os.path.join(tmpdir, 'src')
     self.dest = os.path.join(tmpdir, 'dest')
     unpack_file(self.wheelpath, self.src, None, None)
     # Install scheme: scripts/purelib/data directories under ``dest``.
     self.scheme = dict(
         scripts=os.path.join(self.dest, 'bin'),
         purelib=os.path.join(self.dest, 'lib'),
         data=os.path.join(self.dest, 'data'),
     )
     dist_info = 'sample-1.2.0.dist-info'
     self.src_dist_info = os.path.join(self.src, dist_info)
     self.dest_dist_info = os.path.join(self.scheme['purelib'], dist_info)
Esempio n. 14
0
def unpack_http_url(link,
                    location,
                    download_dir=None,
                    session=None,
                    hashes=None):
    """Download ``link`` over HTTP, verify in-toto metadata when a
    signature was fetched, and unpack the archive into ``location``.

    A valid copy in ``download_dir`` is reused; otherwise the archive and
    its ``.asc`` metadata companion are downloaded to a temp directory.

    :raises TypeError: if ``session`` is not supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'")

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    try:
        # If a download dir is specified, is the file already downloaded
        # there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link, download_dir,
                                                          hashes)

        sig_path = None
        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link, session,
                                                         temp_dir, hashes)
            # FIXME: this is a dirty kludge to force downloading the toto
            # metadata: fetch "<url>.asc#<fragment>" next to the archive.
            sig_url = ".asc#".join(link.url.split("#"))
            sig_link = copy.deepcopy(link)
            sig_link.url = sig_url

            sig_path, content_type = _download_http_url(sig_link, session,
                                                        temp_dir, None)

        # Only verify when a signature was actually fetched; previously
        # this line raised NameError on ``sig_path`` whenever the archive
        # came from download_dir.
        if sig_path is not None and _unpack_toto_metadata(sig_path, temp_dir):
            _verify_toto_metadata(temp_dir)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse
        # dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)
    finally:
        # Remove the temp dir even when download/verify/unpack raised.
        rmtree(temp_dir)
Esempio n. 15
0
def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    if is_dir_url(link):
        # Local directory: replace ``location`` with a symlink-preserving
        # copy of the tree; nothing to download or cache.
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the link's
    # embedded hash, or MissingHashes; it is required to match. If
    # --require-hashes is on, we are satisfied by any hash in `hashes`
    # matching (URL-based or option-based); no internet-sourced hash
    # will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # Prefer a valid, previously downloaded copy when a download dir
    # is configured.
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes)

    from_path = link_path
    if already_downloaded_path:
        from_path = already_downloaded_path

    content_type = mimetypes.guess_type(from_path)[0]

    # Unpack into the build location; even download-only runs need the
    # archive unpacked so dependencies can be parsed.
    unpack_file(from_path, location, content_type, link)

    # Cache the archive in download_dir unless it already came from there.
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
Esempio n. 16
0
 def prep(self, data, tmpdir):
     """Unpack the sample wheel into ``src`` and define the install scheme.

     Sets the requirement, source/destination directories and the
     scripts/purelib/data scheme paths used by the install tests.
     """
     self.name = 'sample'
     self.wheelpath = data.packages.join(
         'sample-1.2.0-py2.py3-none-any.whl')
     self.req = pkg_resources.Requirement.parse('sample')
     self.src = os.path.join(tmpdir, 'src')
     self.dest = os.path.join(tmpdir, 'dest')
     # Extract the fixture wheel so dist-info paths below exist.
     unpack_file(self.wheelpath, self.src, None, None)
     # Install scheme: scripts/purelib/data subdirectories under dest.
     self.scheme = {
         'scripts': os.path.join(self.dest, 'bin'),
         'purelib': os.path.join(self.dest, 'lib'),
         'data': os.path.join(self.dest, 'data'),
     }
     self.src_dist_info = os.path.join(
         self.src, 'sample-1.2.0.dist-info')
     self.dest_dist_info = os.path.join(
         self.scheme['purelib'], 'sample-1.2.0.dist-info')
Esempio n. 17
0
def unpack_file_url(link, location, download_dir=None):
    """Unpack link into location.
    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir."""

    link_path = url_to_path(link.url_without_fragment)

    # Local directory: copy the tree (minus VCS/tox metadata) and return.
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        skip = shutil.ignore_patterns('.tox', '.git', '.hg', '.bzr', '.svn')
        shutil.copytree(link_path, location, symlinks=True, ignore=skip)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # if link has a hash, let's confirm it matches
    if link.hash:
        _check_hash(_get_hash_from_file(link_path, link), link)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # Unpack to the build dir even in download-only mode, since
    # dependencies are parsed from the unpacked archive.
    unpack_file(from_path, location, content_type, link)

    # Copy into download_dir unless the file was already cached there.
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, content_type, link)
Esempio n. 18
0
def unpack_file_url(link, location, download_dir=None):
    """Unpack link into location.
    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir."""

    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory: copy the tree (skipping VCS and
    # tox metadata) and return -- no hashing or caching applies.
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(
            link_path, location, symlinks=True,
            ignore=shutil.ignore_patterns(
                '.tox', '.git', '.hg', '.bzr', '.svn'))
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # if link has a hash, let's confirm it matches
    if link.hash:
        link_path_hash = _get_hash_from_file(link_path, link)
        _check_hash(link_path_hash, link)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir)

    # Prefer the cached copy; otherwise unpack straight from the link path.
    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, content_type, link)
Esempio n. 19
0
def get_archive_details(filename, pbr_version=None):
    """Return egg details for a source archive, caching by name and size."""
    if not sh.isfile(filename):
        raise IOError("Can not detail non-existent file %s" % (filename))

    with EGGS_DETAILED_LOCK:
        # Cache key combines basename and size so a replaced archive with
        # the same name but different content misses the cache.
        cache_key = "f:%s:%s" % (sh.basename(filename), sh.getsize(filename))
        try:
            return EGGS_DETAILED[cache_key]
        except KeyError:
            pass

        # Unpack a private copy via pip's helper to obtain the egg-info.
        with utils.tempdir() as td:
            local_copy = sh.copy(filename,
                                 sh.joinpths(td, sh.basename(filename)))
            extract_to = sh.mkdir(sh.joinpths(td, 'build'))
            pip_util.unpack_file(local_copy, extract_to,
                                 content_type='', link='')
            egg_details = get_directory_details(extract_to,
                                                pbr_version=pbr_version)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
Esempio n. 20
0
def get_archive_details(filename, pbr_version=None):
    """Return egg details for a source archive, caching the result.

    Raises IOError when ``filename`` does not exist.
    """
    if not sh.isfile(filename):
        raise IOError("Can not detail non-existent file %s" % (filename))

    # Check if we already got the details of this file previously
    with EGGS_DETAILED_LOCK:
        # Key on basename + size so a changed archive misses the cache.
        cache_key = "f:%s:%s" % (sh.basename(filename), sh.getsize(filename))
        if cache_key in EGGS_DETAILED:
            return EGGS_DETAILED[cache_key]

        # Get pip to get us the egg-info.
        with utils.tempdir() as td:
            # Work on a private copy inside the temp dir.
            filename = sh.copy(filename, sh.joinpths(td,
                                                     sh.basename(filename)))
            extract_to = sh.mkdir(sh.joinpths(td, 'build'))
            pip_util.unpack_file(filename,
                                 extract_to,
                                 content_type='',
                                 link='')
            egg_details = get_directory_details(extract_to,
                                                pbr_version=pbr_version)

        EGGS_DETAILED[cache_key] = egg_details
        return egg_details
Esempio n. 21
0
def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None, progress_bar="on"):
    """Download ``link`` over HTTP and unpack the archive into ``location``.

    A valid copy already present in ``download_dir`` is reused; otherwise
    the file is downloaded into a temporary directory and, when
    ``download_dir`` is set, a copy of the archive is left there.

    :raises TypeError: if ``session`` is not supplied.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        # Only the freshly downloaded temp copy is removed; a cached copy
        # in download_dir is left in place.
        if not already_downloaded_path:
            os.unlink(from_path)