Example #1
    def run(self, options, args):

        deprecation = textwrap.dedent("""

            ###############################################
            ##                                           ##
            ##  Due to lack of interest and maintenance, ##
            ##  'pip bundle' and support for installing  ##
            ##  from *.pybundle files is deprecated, as  ##
            ##  of v1.4 and will be removed in a         ##
            ##  future release.                          ##
            ##                                           ##
            ###############################################

        """)
        logger.warn(deprecation)

        if not args:
            raise InstallationError('You must give a bundle filename')
        # We have to get everything when creating a bundle:
        options.ignore_installed = True
        logger.notify('Putting temporary build files in %s and source/develop files in %s'
                      % (display_path(options.build_dir), display_path(options.src_dir)))
        self.bundle_filename = args.pop(0)
        requirement_set = super(BundleCommand, self).run(options, args)
        return requirement_set
Example #2
 def get_info(self, location):
     """Returns (url, revision), where both are strings"""
     assert not location.rstrip('/').endswith(self.dirname), \
         'Bad directory: %s' % location
     output = call_subprocess(
         [self.cmd, 'info', location],
         show_stdout=False,
         extra_environ={'LANG': 'C'},
     )
     match = _svn_url_re.search(output)
     if not match:
         logger.warn(
             'Cannot determine URL of svn checkout %s' %
             display_path(location)
         )
         return None, None
     url = match.group(1).strip()
     match = _svn_revision_re.search(output)
     if not match:
         logger.warn(
             'Cannot determine revision of svn checkout %s' %
             display_path(location)
         )
         logger.info('Output that cannot be parsed: \n%s' % output)
         return url, None
     return url, match.group(1)
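For context, the two module-level regexes this method depends on are not part of the snippet; a minimal sketch of plausible definitions (an assumption based on `svn info` output, which extra_environ={'LANG': 'C'} keeps in English; pip's real patterns may differ):

import re

# Hypothetical patterns for lines like 'URL: http://...' and
# 'Revision: 12345' in `svn info` output.
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (\d+)')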
Example #3
 def get_src_requirement(self, dist, location, find_tags=False):
     repo = self.get_url(location)
     if repo is None:
         return None
     parts = repo.split('/')
     ## FIXME: why not project name?
     egg_project_name = dist.egg_name().split('-', 1)[0]
     rev = self.get_revision(location)
     if parts[-2] in ('tags', 'tag'):
         # It's a tag, perfect!
         full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
     elif parts[-2] in ('branches', 'branch'):
         # It's a branch :(
         full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
     elif parts[-1] == 'trunk':
         # Trunk :-/
         full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
         if find_tags:
             tag_url = '/'.join(parts[:-1]) + '/tags'
             tag_revs = self.get_tag_revs(tag_url)
             match = self.find_tag_match(rev, tag_revs)
             if match:
                 logger.notify('trunk checkout %s seems to be equivalent '
                               'to tag %s' % (location, match))
                 repo = '%s/%s' % (tag_url, match)
                 full_egg_name = '%s-%s' % (egg_project_name, match)
     else:
         # Don't know what it is
         logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
         full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
     return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
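The return value is a pip-style VCS requirement string. Reconstructing the tag branch by hand with hypothetical values shows the shape:

# Hypothetical values for a tag checkout of project 'pkg':
egg_project_name = 'pkg'
repo = 'http://svn.example.com/repo/tags/1.2'
rev = '1234'
full_egg_name = '%s-%s' % (egg_project_name, repo.split('/')[-1])
print('svn+%s@%s#egg=%s' % (repo, rev, full_egg_name))
# -> svn+http://svn.example.com/repo/tags/1.2@1234#egg=pkg-1.2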
Example #4
    def from_dist(cls, dist, find_tags=False):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location, find_tags)
            except InstallationError as exc:
                logger.warn(
                    "Error when trying to get requirement for VCS system "
                    "(%s), falling back to uneditable format" % exc
                )
                req = None
            if req is None:
                logger.warn(
                    'Could not determine repository location of %s' % location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='

        return cls(dist.project_name, req, editable, comments)
Example #5
def get_src_requirement(dist, location, find_tags):
    version_control = vcs.get_backend_from_location(location)
    if version_control:
        return version_control().get_src_requirement(dist, location, find_tags)
    logger.warn(
        'cannot determine version of editable source in %s (is not SVN '
        'checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
    return dist.as_requirement()
Example #6
 def move_bundle_files(self, dest_build_dir, dest_src_dir):
     base = self._temp_build_dir
     assert base
     src_dir = os.path.join(base, 'src')
     build_dir = os.path.join(base, 'build')
     bundle_build_dirs = []
     bundle_editable_dirs = []
     for source_dir, dest_dir, dir_collection in [
             (src_dir, dest_src_dir, bundle_editable_dirs),
             (build_dir, dest_build_dir, bundle_build_dirs)]:
         if os.path.exists(source_dir):
             for dirname in os.listdir(source_dir):
                 dest = os.path.join(dest_dir, dirname)
                 dir_collection.append(dest)
                 if os.path.exists(dest):
                     logger.warn(
                         'The directory %s (containing package %s) already '
                         'exists; cannot move source from bundle %s' %
                         (dest, dirname, self)
                     )
                     continue
                 if not os.path.exists(dest_dir):
                     logger.info('Creating directory %s' % dest_dir)
                     os.makedirs(dest_dir)
                 shutil.move(os.path.join(source_dir, dirname), dest)
             if not os.listdir(source_dir):
                 os.rmdir(source_dir)
     self._temp_build_dir = None
     self._bundle_build_dirs = bundle_build_dirs
     self._bundle_editable_dirs = bundle_editable_dirs
Example #7
 def get_url(self, location):
     # In cases where the source is in a subdirectory, not alongside setup.py,
     # we have to walk up the directory tree until we find a real setup.py
     orig_location = location
     while not os.path.exists(os.path.join(location, 'setup.py')):
         last_location = location
         location = os.path.dirname(location)
         if location == last_location:
             # We've traversed up to the root of the filesystem without finding setup.py
             logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                         % orig_location)
             return None
     f = open(os.path.join(location, self.dirname, 'entries'))
     data = f.read()
     f.close()
     if data.startswith('8') or data.startswith('9') or data.startswith('10'):
         data = list(map(str.splitlines, data.split('\n\x0c\n')))
         del data[0][0]  # get rid of the '8'
         return data[0][3]
     elif data.startswith('<?xml'):
         match = _svn_xml_url_re.search(data)
         if not match:
             raise ValueError('Badly formatted data: %r' % data)
         return match.group(1)    # get repository URL
     else:
         logger.warn("Unrecognized .svn/entries format in %s" % location)
         # Or raise exception?
         return None
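The non-XML branch can be exercised in isolation. Below is a synthetic payload mirroring what the parser expects; the field layout is inferred from the code above, not from the SVN working-copy spec:

# Records are separated by '\n\x0c\n'; the first line of the first record
# is the working-copy format number, and the URL is the fourth remaining
# line of that record once the format number is dropped.
data = '10\n\ndir\n42\nhttp://svn.example.com/repo/trunk\n\x0c\nother-entry\n'
records = list(map(str.splitlines, data.split('\n\x0c\n')))
del records[0][0]         # drop the '10'
print(records[0][3])      # -> http://svn.example.com/repo/trunk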
Example #8
 def unregister(self, cls=None, name=None):
     if name in self._registry:
         del self._registry[name]
     elif cls in self._registry.values():
         del self._registry[cls.name]
     else:
         logger.warn('Cannot unregister because no class or name given')
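A minimal sketch of the registry state this method operates on (an assumption: a name-to-class mapping kept on the VCS support object, which register() in Example #37 fills):

# Illustrative only: unregister(name='git') deletes by key, while
# unregister(cls=Git) falls back to the class's .name attribute.
class Git(object):
    name = 'git'

_registry = {'git': Git}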
Example #9
 def remove_filename_from_pth(self, filename):
     for pth in self.pth_files():
         f = open(pth, 'r')
         lines = f.readlines()
         f.close()
         new_lines = [
              line for line in lines if line.strip() != filename]
         if lines != new_lines:
             logger.info('Removing reference to %s from .pth file %s'
                         % (display_path(filename), display_path(pth)))
             if not [line for line in new_lines if line]:
                 logger.info(
                     '%s file would be empty: deleting' % display_path(pth)
                 )
                 if not self.simulate:
                     os.unlink(pth)
             else:
                 if not self.simulate:
                     f = open(pth, 'wb')
                     f.writelines(new_lines)
                     f.close()
             return
     logger.warn(
         'Cannot find a reference to %s in any .pth file' %
         display_path(filename)
     )
Example #10
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of the one it's compatible with (e.g. 2.0 > 1.1), and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warn('Installing from a newer Wheel-Version (%s)'
                    % '.'.join(map(str, version)))
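A usage sketch, assuming VERSION_COMPATIBLE is the module-level constant (1, 0) and given the UnsupportedWheel and logger names the function already uses:

VERSION_COMPATIBLE = (1, 0)  # assumed value of pip's module-level constant

check_compatibility((1, 0), 'demo')  # silent: exact match
check_compatibility((1, 2), 'demo')  # warns: only the minor version is newer
check_compatibility((2, 0), 'demo')  # raises UnsupportedWheel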
Example #11
    def run(self, options, args):
        if not args:
            logger.warn('ERROR: Please provide a package name or names.')
            return
        query = args

        results = self.search_packages_info(query, options.index)
        self.print_results(results, options.files)
Example #12
def wheel_setuptools_support():
    """
    Return True if we have a setuptools that supports wheel.
    """
    fulfilled = hasattr(pkg_resources, 'DistInfoDistribution')
    if not fulfilled:
        logger.warn("Wheel installs require setuptools >= 0.8 for dist-info support.")
    return fulfilled
Example #13
    def run(self, options, args):
        if not args:
            logger.warn("ERROR: Please provide a project name or names.")
            return
        query = args

        results = search_packages_info(query)
        print_results(results, options.files)
Example #14
def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            ## FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
            else:
                logger.start_progress('Downloading %s (unknown size): ' % show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.info('Downloading from URL %s' % link)

        def resp_read(chunk_size):
            try:
                # Special case for urllib3.
                try:
                    for chunk in resp.raw.stream(
                            chunk_size, decode_content=False):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = resp.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

        for chunk in resp_read(4096):
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress('%3i%%  %s' % (100 * downloaded / total_length, format_size(downloaded)))
            if download_hash is not None:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash
Example #15
    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.info('%s in %s exists, and has correct URL (%s)' %
                                (self.repo_name.title(), display_path(dest),
                                 url))
                    logger.notify('Updating %s %s%s' %
                                  (display_path(dest), self.repo_name,
                                   rev_display))
                    self.update(dest, rev_options)
                else:
                    logger.warn('%s %s in %s exists with URL %s' %
                                (self.name, self.repo_name,
                                 display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warn('Directory %s already exists, '
                            'and is not a %s %s.' %
                            (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s' %
                        (self.name, url))
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.notify('Switching %s %s to %s%s' %
                              (self.repo_name, display_path(dest), url,
                               rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout
Example #16
 def pkg_info(self):
     p = FeedParser()
     data = self.egg_info_data('PKG-INFO')
     if not data:
         logger.warn(
             'No PKG-INFO file found in %s' %
             display_path(self.egg_info_path('PKG-INFO'))
         )
     p.feed(data or '')
     return p.close()
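FeedParser here is the standard library's incremental message parser; PKG-INFO uses the same key-value format as email headers. A self-contained illustration (the exact import pip uses is an assumption):

from email.parser import FeedParser

p = FeedParser()
p.feed('Metadata-Version: 1.0\nName: demo\nVersion: 1.0\n')
msg = p.close()
print('%s %s' % (msg['Name'], msg['Version']))  # -> demo 1.0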
Example #17
    def from_line(cls, name, comes_from=None, prereleases=None):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        url = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None

        if is_url(name):
            link = Link(name)
        elif (os.path.isdir(path)
                and (os.path.sep in name or name.startswith('.'))):
            if not is_installable_dir(path):
                raise InstallationError(
                    "Directory %r is not installable. File 'setup.py' not "
                    "found." % name
                )
            link = Link(path_to_url(name))
        elif is_archive_file(path):
            if not os.path.isfile(path):
                logger.warn(
                    'Requirement %r looks like a filename, but the file does '
                    'not exist',
                    name
                )
            link = Link(path_to_url(name))

        # If the line has an egg= definition, but isn't editable, pull the
        # requirement out. Otherwise, assume the name is the req for the
        # non-URL/path/archive case.
        if link and req is None:
            url = link.url_without_fragment
            # when fragment is None, this will become an 'unnamed' requirement
            req = link.egg_fragment

            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', url):
                url = path_to_url(os.path.normpath(os.path.abspath(link.path)))

            # fail early for invalid or unsupported wheels
            if link.ext == wheel_ext:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                if not wheel.supported():
                    raise UnsupportedWheel(
                        "%s is not a supported wheel on this platform." %
                        wheel.filename
                    )

        else:
            req = name

        return cls(req, comes_from, url=url, prereleases=prereleases)
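Hypothetical inputs this classmethod is designed to accept, one per supported shape:

# A plain requirement specifier:
InstallRequirement.from_line('requests>=1.0')
# A local directory containing setup.py (the './' makes it look like a path):
InstallRequirement.from_line('./my-project/')
# A remote archive with an egg fragment naming the requirement:
InstallRequirement.from_line('https://example.com/pkg-1.0.tar.gz#egg=pkg')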
Example #18
 def assert_source_matches_version(self):
     assert self.source_dir
     version = self.installed_version
     if version not in self.req:
         logger.warn(
             'Requested %s, but installing version %s' %
             (self, self.installed_version)
         )
     else:
         logger.debug(
             'Source in %s has version %s, which satisfies requirement %s' %
             (display_path(self.source_dir), version, self)
         )
Example #19
    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside setup.py,
        # we have to walk up the directory tree until we find a real setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, "setup.py")):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without finding setup.py
                logger.warn("Could not find setup.py for directory %s (tried all parent directories)" % orig_location)
                return None

        return self._get_svn_url_rev(location)[0]
Example #20
 def switch(self, dest, url, rev_options):
     repo_config = os.path.join(dest, self.dirname, 'hgrc')
     config = ConfigParser.SafeConfigParser()
     try:
         config.read(repo_config)
         config.set('paths', 'default', url)
         config_file = open(repo_config, 'w')
         config.write(config_file)
         config_file.close()
     except (OSError, ConfigParser.NoSectionError) as e:
         logger.warn(
             'Could not switch Mercurial repository to %s: %s'
             % (url, e))
Example #21
def unpack_file_url(link, location, download_dir=None):

    link_path = url_to_path(link.url_without_fragment)
    already_downloaded = False

    # If it's a url to a local directory
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        return

    # if link has a hash, let's confirm it matches
    if link.hash:
        link_path_hash = _get_hash_from_file(link_path, link)
        _check_hash(link_path_hash, link)

    # If a download dir is specified, is the file already there and valid?
    if download_dir:
        download_path = os.path.join(download_dir, link.filename)
        if os.path.exists(download_path):
            content_type = mimetypes.guess_type(download_path)[0]
            logger.notify('File was already downloaded %s' % download_path)
            if link.hash:
                download_hash = _get_hash_from_file(download_path, link)
                try:
                    _check_hash(download_hash, link)
                    already_downloaded = True
                except HashMismatch:
                    logger.warn(
                        'Previously-downloaded file %s has bad hash, '
                        're-downloading.' % link_path
                        )
                    os.unlink(download_path)
            else:
                already_downloaded = True

    if already_downloaded:
        from_path = download_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded:
        _copy_file(from_path, download_dir, content_type, link)
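A hedged usage sketch: unpacking a local archive referenced by a file:// URL. Link stands for pip's link wrapper (pip.index.Link in this era), and the paths are hypothetical:

from pip.index import Link

link = Link('file:///tmp/downloads/pkg-1.0.tar.gz')
unpack_file_url(link, '/tmp/build/pkg', download_dir='/tmp/downloads')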
Example #22
File: util.py Project: tito97/Code
def untar_file(filename, location):
    """Untar the file (tar file located at filename) to the destination location"""
    if not os.path.exists(location):
        os.makedirs(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warn('Cannot determine compression type for file %s' % filename)
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, e))
                    continue
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                destfp = open(path, 'wb')
                try:
                    shutil.copyfileobj(fp, destfp)
                finally:
                    destfp.close()
                fp.close()
    finally:
        tar.close()
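A typical call with hypothetical paths; the compression mode is chosen from the filename, with 'r:*' as a guess-everything fallback:

# Extracts the archive into ./build/pkg, creating the directory if needed
# and skipping the synthetic 'pax_global_header' member.
untar_file('downloads/pkg-1.0.tar.gz', 'build/pkg')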
Example #23
 def unzip_package(self, module_name, filename):
     zip_filename = os.path.dirname(filename)
     if not (os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename)):
         raise InstallationError(
             'Module %s (in %s) isn\'t located in a zip file in %s'
             % (module_name, filename, zip_filename))
     package_path = os.path.dirname(zip_filename)
     if package_path not in self.paths():
         logger.warn(
             'Unpacking %s into %s, but %s is not on sys.path'
             % (display_path(zip_filename), display_path(package_path),
                display_path(package_path)))
     logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
     if self.simulate:
         logger.notify('Skipping remaining operations because of --simulate')
         return
     logger.indent += 2
     try:
         ## FIXME: this should be undoable:
         zip = zipfile.ZipFile(zip_filename)
         to_save = []
         for info in zip.infolist():
             name = info.filename
             if name.startswith(module_name + os.path.sep):
                 content = zip.read(name)
                 dest = os.path.join(package_path, name)
                 if not os.path.exists(os.path.dirname(dest)):
                     os.makedirs(os.path.dirname(dest))
                 if not content and dest.endswith(os.path.sep):
                     if not os.path.exists(dest):
                         os.makedirs(dest)
                 else:
                     f = open(dest, 'wb')
                     f.write(content)
                     f.close()
             else:
                 to_save.append((name, zip.read(name)))
         zip.close()
         if not to_save:
             logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
             os.unlink(zip_filename)
             self.remove_filename_from_pth(zip_filename)
         else:
             logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
             zip = zipfile.ZipFile(zip_filename, 'w')
             for name, content in to_save:
                 zip.writestr(name, content)
             zip.close()
     finally:
         logger.indent -= 2
Example #24
 def switch(self, dest, url, rev_options):
     repo_config = os.path.join(dest, self.dirname, "hgrc")
     config = ConfigParser.SafeConfigParser()
     try:
         config.read(repo_config)
         config.set("paths", "default", url)
         config_file = open(repo_config, "w")
         config.write(config_file)
         config_file.close()
     except (OSError, ConfigParser.NoSectionError):
         e = sys.exc_info()[1]
         logger.warn("Could not switch Mercurial repository to %s: %s" % (url, e))
     else:
         call_subprocess([self.cmd, "update", "-q"] + rev_options, cwd=dest)
Example #25
def wheel_setuptools_support():
    """
    Return True if we have a setuptools that supports wheel.
    """
    fulfilled = False
    try:
        installed_setuptools = pkg_resources.get_distribution('setuptools')
        if installed_setuptools in setuptools_requirement:
            fulfilled = True
    except pkg_resources.DistributionNotFound:
        pass
    if not fulfilled:
        logger.warn("%s is required for wheel installs." % setuptools_requirement)
    return fulfilled
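A plausible definition of the setuptools_requirement object referenced above (an assumption; the snippet only relies on `dist in requirement` membership, which pkg_resources.Requirement supports):

import pkg_resources

setuptools_requirement = pkg_resources.Requirement.parse('setuptools>=0.8')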
Example #26
def _get_hash_from_file(target_file, link):
    try:
        download_hash = hashlib.new(link.hash_name)
    except (ValueError, TypeError):
        logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
        return None

    fp = open(target_file, 'rb')
    while True:
        chunk = fp.read(4096)
        if not chunk:
            break
        download_hash.update(chunk)
    fp.close()
    return download_hash
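A sketch of how the hash travels on the link (assumption: the link object parses the #md5=... fragment of its URL into .hash_name and .hash, as pip's Link does):

from pip.index import Link

link = Link('https://example.com/pkg-1.0.tar.gz'
            '#md5=d41d8cd98f00b204e9800998ecf8427e')
digest = _get_hash_from_file('downloads/pkg-1.0.tar.gz', link)
if digest is not None:
    print(digest.hexdigest())   # compared against link.hash by _check_hash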
Example #27
    def run(self, options, args):
        if not args:
            logger.warn('ERROR: Missing required argument (search query).')
            return
        query = args
        index_url = options.index

        pypi_hits = self.search(query, index_url)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
Example #28
 def switch(self, dest, url, rev_options):
     repo_config = os.path.join(dest, self.dirname, 'hgrc')
     config = ConfigParser.SafeConfigParser()
     try:
         config.read(repo_config)
         config.set('paths', 'default', url)
         config_file = open(repo_config, 'w')
         config.write(config_file)
         config_file.close()
     except (OSError, ConfigParser.NoSectionError) as exc:
         logger.warn(
             'Could not switch Mercurial repository to %s: %s'
             % (url, exc))
     else:
         call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
Example #29
    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue
            f = open(entries_fn)
            data = f.read()
            f.close()

            if data.startswith('8') or data.startswith('9') or data.startswith('10'):
                data = list(map(str.splitlines, data.split('\n\x0c\n')))
                del data[0][0]  # get rid of the '8'
                dirurl = data[0][3]
                revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            elif data.startswith('<?xml'):
                dirurl = _svn_xml_url_re.search(data).group(1)    # get repository URL
                revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
                if revs:
                    localrev = max(revs)
                else:
                    localrev = 0
            else:
                logger.warn("Unrecognized .svn/entries format; skipping %s", base)
                dirs[:] = []
                continue
            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision
Example #30
File: git.py Project: hedberg/pip
 def check_rev_options(self, rev, dest, rev_options):
     """Check the revision options before checkout to compensate that tags
     and branches may need origin/ as a prefix.
     Returns the SHA1 of the branch or tag if found.
     """
     revisions = self.get_tag_revs(dest)
     revisions.update(self.get_branch_revs(dest))
     inverse_revisions = dict((v, k) for k, v in revisions.iteritems())
     # Check if rev is a branch name
     origin_rev = 'origin/%s' % rev
     if origin_rev in inverse_revisions:
         return [inverse_revisions[origin_rev]]
     elif rev in inverse_revisions:
         return [inverse_revisions[rev]]
     else:
         logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
         return rev_options
Example #31
def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warn("Unsupported hash name %s for package %s" %
                        (link.hash_name, link))

    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            # FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress('Downloading %s (%s): ' %
                                      (show_url, format_size(total_length)))
            else:
                logger.start_progress('Downloading %s (unknown size): ' %
                                      show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.info('Downloading from URL %s' % link)

        def resp_read(chunk_size):
            try:
                # Special case for urllib3.
                try:
                    for chunk in resp.raw.stream(
                            chunk_size,
                            # We use decode_content=False here because we do
                            # not want urllib3 to mess with the raw bytes we
                            # get from the server. If we decompress inside of
                            # urllib3 then we cannot verify the checksum
                            # because the checksum will be of the compressed
                            # file. This breakage will only occur if the
                            # server adds a Content-Encoding header, which
                            # depends on how the server was configured:
                            # - Some servers will notice that the file isn't a
                            #   compressible file and will leave the file alone
                            #   and with an empty Content-Encoding
                            # - Some servers will notice that the file is
                            #   already compressed and will leave the file
                            #   alone and will add a Content-Encoding: gzip
                            #   header
                            # - Some servers won't notice anything at all and
                            #   will take a file that's already been compressed
                            #   and compress it again and set the
                            #   Content-Encoding: gzip header
                            #
                            # By setting this not to decode automatically we
                            # hope to eliminate problems with the second case.
                            decode_content=False):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = resp.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

        for chunk in resp_read(4096):
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress('%3i%%  %s' %
                                         (100 * downloaded / total_length,
                                          format_size(downloaded)))
            if download_hash is not None:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash
Example #32
def unpack_http_url(link, location, download_cache, download_dir=None,
                    session=None):
    if session is None:
        session = PipSession()

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    temp_location = None
    target_url = link.url.split('#', 1)[0]
    already_cached = False
    cache_file = None
    cache_content_type_file = None
    download_hash = None

    # If a download cache is specified, is the file cached there?
    if download_cache:
        cache_file = os.path.join(download_cache,
                                   urllib.quote(target_url, ''))
        cache_content_type_file = cache_file + '.content-type'
        already_cached = (
            os.path.exists(cache_file) and
            os.path.exists(cache_content_type_file)
            )
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded = None
    if download_dir:
        already_downloaded = os.path.join(download_dir, link.filename)
        if not os.path.exists(already_downloaded):
            already_downloaded = None

    # If already downloaded, does its hash match?
    if already_downloaded:
        temp_location = already_downloaded
        content_type = mimetypes.guess_type(already_downloaded)[0]
        logger.notify('File was already downloaded %s' % already_downloaded)
        if link.hash:
            download_hash = _get_hash_from_file(temp_location, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn(
                    'Previously-downloaded file %s has bad hash, '
                    're-downloading.' % temp_location
                    )
                temp_location = None
                os.unlink(already_downloaded)
                already_downloaded = None

    # If not a valid download, let's confirm the cached file is valid
    if already_cached and not temp_location:
        with open(cache_content_type_file) as fp:
            content_type = fp.read().strip()
        temp_location = cache_file
        logger.notify('Using download cache from %s' % cache_file)
        if link.hash and link.hash_name:
            download_hash = _get_hash_from_file(cache_file, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn(
                    'Cached file %s has bad hash, '
                    're-downloading.' % temp_location
                    )
                temp_location = None
                os.unlink(cache_file)
                os.unlink(cache_content_type_file)
                already_cached = False

    # We don't have either a cached or a downloaded copy
    # let's download to a tmp dir
    if not temp_location:
        try:
            resp = session.get(target_url, stream=True)
            resp.raise_for_status()
        except requests.HTTPError as exc:
            logger.fatal("HTTP error %s while getting %s" %
                         (exc.response.status_code, link))
            raise

        content_type = resp.headers.get('content-type', '')
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.headers.get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty" value
            # from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != resp.url:
            ext = os.path.splitext(resp.url)[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.hash and link.hash_name:
            _check_hash(download_hash, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded:
        _copy_file(temp_location, download_dir, content_type, link)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(temp_location, location, content_type, link)

    # if using a download cache, cache it, if needed
    if cache_file and not already_cached:
        cache_download(cache_file, temp_location, content_type)

    if not (already_cached or already_downloaded):
        os.unlink(temp_location)

    os.rmdir(temp_dir)
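The cache key is simply the target URL quoted into a single path segment; a quick demonstration (Python 2, matching the snippet's urllib.quote call; the cache directory is hypothetical):

import urllib

cache_file = '/cache/' + urllib.quote('https://example.com/pkg-1.0.tar.gz', '')
print(cache_file)
# -> /cache/https%3A%2F%2Fexample.com%2Fpkg-1.0.tar.gz
# (the '.content-type' sidecar file sits next to it)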
Example #33
    def install(self, install_options, global_options=(), root=None):
        if self.editable:
            self.install_editable(install_options, global_options)
            return
        if self.is_wheel:
            version = pip.wheel.wheel_version(self.source_dir)
            pip.wheel.check_compatibility(version, self.name)

            self.move_wheel_files(self.source_dir, root=root)
            self.install_succeeded = True
            return

        temp_location = tempfile.mkdtemp('-record', 'pip-')
        record_filename = os.path.join(temp_location, 'install-record.txt')
        try:
            install_args = [sys.executable]
            install_args.append('-c')
            install_args.append(
                "import setuptools, tokenize;__file__=%r;"
                "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
                ".replace('\\r\\n', '\\n'), __file__, 'exec'))" %
                self.setup_py)
            install_args += list(global_options) + \
                ['install', '--record', record_filename]

            if not self.as_egg:
                install_args += ['--single-version-externally-managed']

            if root is not None:
                install_args += ['--root', root]

            if self.pycompile:
                install_args += ["--compile"]
            else:
                install_args += ["--no-compile"]

            if running_under_virtualenv():
                # FIXME: I'm not sure if this is a reasonable location;
                # probably not but we can't put it in the default location, as
                # that is a virtualenv symlink that isn't writable
                install_args += [
                    '--install-headers',
                    os.path.join(sys.prefix, 'include', 'site',
                                 'python' + get_python_version())
                ]
            logger.notify('Running setup.py install for %s' % self.name)
            logger.indent += 2
            try:
                call_subprocess(
                    install_args + install_options,
                    cwd=self.source_dir,
                    filter_stdout=self._filter_install,
                    show_stdout=False,
                )
            finally:
                logger.indent -= 2
            if not os.path.exists(record_filename):
                logger.notify('Record file %s not found' % record_filename)
                return
            self.install_succeeded = True
            if self.as_egg:
                # there's no --always-unzip option we can pass to the install
                # command, so we are unable to save the installed-files.txt
                return

            def prepend_root(path):
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            f = open(record_filename)
            for line in f:
                line = line.strip()
                if line.endswith('.egg-info'):
                    egg_info_dir = prepend_root(line)
                    break
            else:
                logger.warn(
                    'Could not find .egg-info directory in install record for '
                    '%s' % self)
                f.close()
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return
            f.close()
            new_lines = []
            f = open(record_filename)
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    make_path_relative(prepend_root(filename), egg_info_dir))
            f.close()
            f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
            f.write('\n'.join(new_lines) + '\n')
            f.close()
        finally:
            if os.path.exists(record_filename):
                os.remove(record_filename)
            os.rmdir(temp_location)
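change_root, used by prepend_root above, comes from distutils.util; a quick demonstration of what it does with a --root staging tree:

from distutils.util import change_root

print(change_root('/tmp/stage', '/usr/lib/python/demo.py'))
# -> /tmp/stage/usr/lib/python/demo.py (on POSIX)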
Example #34
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                logger.notify('Complete output from command %s:' %
                              command_desc)
                logger.notify('\n'.join(all_output) +
                              '\n----------------------------------------')
            raise InstallationError("Command %s failed with error code %s" %
                                    (command_desc, proc.returncode))
        else:
            logger.warn("Command %s had error code %s" %
                        (command_desc, proc.returncode))
    if stdout is not None:
        return ''.join(all_output)


if __name__ == '__main__':
    exit = main()
    if exit:
        sys.exit(exit)
Example #35
    def run(self, options, args):
        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages are not visible in this virtualenv."
                )
            install_options.append('--user')
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(
                    options.target_dir):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )
            install_options.append('--home=' + temp_target_dir)
        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        finder = self._build_package_finder(options, index_urls)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site)
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(
                    name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename,
                                          finder=finder,
                                          options=options):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        if (options.use_user_site and sys.version_info < (2, 6)):
            raise InstallationError(
                '--user is only supported in Python version 2.6 and newer')

        import setuptools
        if (options.use_user_site and requirement_set.has_editables
                and not getattr(setuptools, '_distribute', False)):

            raise InstallationError(
                '--user --editable not supported with setuptools, use distribute'
            )

        if not options.no_download:
            requirement_set.prepare_files(finder,
                                          force_root_egg_info=self.bundle,
                                          bundle=self.bundle)
        else:
            requirement_set.locate_files()

        if not options.no_install and not self.bundle:
            requirement_set.install(install_options, global_options)
            installed = ' '.join(
                [req.name for req in requirement_set.successfully_installed])
            if installed:
                logger.notify('Successfully installed %s' % installed)
        elif not self.bundle:
            downloaded = ' '.join(
                [req.name for req in requirement_set.successfully_downloaded])
            if downloaded:
                logger.notify('Successfully downloaded %s' % downloaded)
        elif self.bundle:
            requirement_set.create_bundle(self.bundle_filename)
            logger.notify('Created bundle in %s' % self.bundle_filename)
        # Clean up
        if not options.no_install or options.download_dir:
            requirement_set.cleanup_files(bundle=self.bundle)
        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = home_lib(temp_target_dir)
            for item in os.listdir(lib_dir):
                shutil.move(os.path.join(lib_dir, item),
                            os.path.join(options.target_dir, item))
            shutil.rmtree(temp_target_dir)
        return requirement_set
Example #36
def untar_file(filename, location):
    """Untar the file (tar file located at filename) to the destination location"""
    if not os.path.exists(location):
        os.makedirs(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith('.bz2') or filename.lower().endswith(
            '.tbz'):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warn('Cannot determine compression type for file %s' % filename)
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            elif member.issym():
                try:
                    tar._extract_member(member, path)
                except Exception:
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s' %
                        (filename, member.name, e))
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s' %
                        (filename, member.name, e))
                    continue
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                destfp = open(path, 'wb')
                try:
                    shutil.copyfileobj(fp, destfp)
                finally:
                    destfp.close()
                fp.close()
    finally:
        tar.close()
Example #37
 def register(self, cls):
     if not hasattr(cls, 'name'):
         logger.warn('Cannot register VCS %s' % cls.__name__)
         return
     if cls.name not in self._registry:
         self._registry[cls.name] = cls
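Backends register themselves at import time; a minimal sketch with the class body reduced to the one attribute register() checks (assuming vcs is pip's module-level VcsSupport instance, and noting real backends subclass VersionControl):

class Subversion(object):
    name = 'svn'

vcs.register(Subversion)   # stored as self._registry['svn']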
Example #38
    def find_requirement(self, req, upgrade):
        def mkurl_pypi_url(url):
            loc = posixpath.join(url, url_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        url_name = req.url_name
        # Only check main index if index URL is given:
        main_index_url = None
        if self.index_urls:
            # Check that we have the url_name correctly spelled:
            main_index_url = Link(mkurl_pypi_url(self.index_urls[0]),
                                  trusted=True)
            # This will also cache the page, so it's okay that we get it again later:
            page = self._get_page(main_index_url, req)
            if page is None:
                url_name = self._find_url_name(
                    Link(self.index_urls[0], trusted=True), url_name,
                    req) or req.url_name

        # Combine index URLs with mirror URLs here to allow
        # adding more index URLs from requirements files
        all_index_urls = self.index_urls + self.mirror_urls

        if url_name is not None:
            locations = [mkurl_pypi_url(url)
                         for url in all_index_urls] + self.find_links
        else:
            locations = list(self.find_links)
        for version in req.absolute_versions:
            if url_name is not None and main_index_url is not None:
                locations = [posixpath.join(main_index_url.url, version)
                             ] + locations

        file_locations, url_locations = self._sort_locations(locations)
        _flocations, _ulocations = self._sort_locations(self.dependency_links)
        file_locations.extend(_flocations)

        # We trust every url that the user has given us whether it was given
        #   via --index-url, --user-mirrors/--mirror, or --find-links or a
        #   default option thereof
        locations = [Link(url, trusted=True) for url in url_locations]

        # We explicitly do not trust links that came from dependency_links
        locations.extend([Link(url) for url in _ulocations])

        logger.debug('URLs to search for versions for %s:' % req)
        for location in locations:
            logger.debug('* %s' % location)
        found_versions = []
        found_versions.extend(
            self._package_versions(
                # We trust every directly linked archive in find_links
                [Link(url, '-f', trusted=True) for url in self.find_links],
                req.name.lower()))
        page_versions = []
        for page in self._get_pages(locations, req):
            logger.debug('Analyzing links from page %s' % page.url)
            logger.indent += 2
            try:
                page_versions.extend(
                    self._package_versions(page.links, req.name.lower()))
            finally:
                logger.indent -= 2
        dependency_versions = list(
            self._package_versions(
                [Link(url) for url in self.dependency_links],
                req.name.lower()))
        if dependency_versions:
            logger.info('dependency_links found: %s' % ', '.join(
                [link.url for parsed, link, version in dependency_versions]))
        file_versions = list(
            self._package_versions([Link(url) for url in file_locations],
                                   req.name.lower()))
        if not found_versions and not page_versions and not dependency_versions and not file_versions:
            logger.fatal(
                'Could not find any downloads that satisfy the requirement %s'
                % req)

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external %s to allow)." % req.name)

            if self.need_warn_insecure:
                logger.warn("Some insecure and unverifiable files were ignored"
                            " (use --allow-insecure %s to allow)." % req.name)

            raise DistributionNotFound('No distributions at all found for %s' %
                                       req)
        installed_version = []
        if req.satisfied_by is not None:
            installed_version = [(req.satisfied_by.parsed_version, InfLink,
                                  req.satisfied_by.version)]
        if file_versions:
            file_versions.sort(reverse=True)
            logger.info('Local files found: %s' % ', '.join([
                url_to_path(link.url)
                for parsed, link, version in file_versions
            ]))
        # This is an intentional priority ordering.
        all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
        applicable_versions = []
        for (parsed_version, link, version) in all_versions:
            if version not in req.req:
                logger.info("Ignoring link %s, version %s doesn't match %s" %
                            (link, version, ','.join(
                                [''.join(s) for s in req.req.specs])))
                continue
            elif is_prerelease(version) and not (self.allow_all_prereleases
                                                 or req.prereleases):
                # If this version isn't the already installed one, then
                #   ignore it if it's a pre-release.
                if link is not InfLink:
                    logger.info(
                        "Ignoring link %s, version %s is a pre-release (use --pre to allow)."
                        % (link, version))
                    continue
            applicable_versions.append((parsed_version, link, version))
        applicable_versions = self._sort_versions(applicable_versions)
        existing_applicable = bool([
            link for parsed_version, link, version in applicable_versions
            if link is InfLink
        ])
        if not upgrade and existing_applicable:
            if applicable_versions[0][1] is InfLink:
                logger.info(
                    'Existing installed version (%s) is most up-to-date and satisfies requirement'
                    % req.satisfied_by.version)
            else:
                logger.info(
                    'Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
                    % (req.satisfied_by.version, applicable_versions[0][2]))
            return None
        if not applicable_versions:
            logger.fatal(
                'Could not find a version that satisfies the requirement %s (from versions: %s)'
                % (req, ', '.join([
                    version for parsed_version, link, version in all_versions
                ])))

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external to allow).")

            if self.need_warn_insecure:
                logger.warn("Some insecure and unverifiable files were ignored"
                            " (use --allow-insecure %s to allow)." % req.name)

            raise DistributionNotFound(
                'No distributions matching the version for %s' % req)
        if applicable_versions[0][1] is InfLink:
            # We have an existing version, and it's the best version
            logger.info(
                'Installed version (%s) is most up-to-date (past versions: %s)'
                % (req.satisfied_by.version, ', '.join([
                    version for parsed_version, link, version in
                    applicable_versions[1:]
                ]) or 'none'))
            raise BestVersionAlreadyInstalled
        if len(applicable_versions) > 1:
            logger.info(
                'Using version %s (newest of versions: %s)' %
                (applicable_versions[0][2], ', '.join([
                    version
                    for parsed_version, link, version in applicable_versions
                ])))

        selected_version = applicable_versions[0][1]

        # TODO: Remove after 1.4 has been released
        if (selected_version.internal is not None
                and not selected_version.internal):
            logger.warn("You are installing an externally hosted file. Future "
                        "versions of pip will default to disallowing "
                        "externally hosted files.")

        if (selected_version.verifiable is not None
                and not selected_version.verifiable):
            logger.warn("You are installing a potentially insecure and "
                        "unverifiable file. Future versions of pip will "
                        "default to disallowing insecure files.")

        return selected_version
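
The bookkeeping above ultimately sorts (parsed_version, link, version) tuples and takes the head of the list, with the installed version injected as a sentinel. A minimal illustration of that ordering, using pkg_resources.parse_version and placeholder origins instead of pip's Link objects:

from pkg_resources import parse_version

candidates = [
    (parse_version('1.0'), 'index page', '1.0'),
    (parse_version('1.2'), 'find-links', '1.2'),
    (parse_version('1.1'), 'installed', '1.1'),
]
candidates.sort(reverse=True)
parsed, origin, version = candidates[0]
print('Using version %s (found via %s)' % (version, origin))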
Example No. 39
    def run(self, options, args):
        requirement = options.requirement
        find_links = options.find_links or []
        local_only = options.local
        ## FIXME: Obviously this should be settable:
        find_tags = False
        skip_match = None

        skip_regex = options.skip_requirements_regex
        if skip_regex:
            skip_match = re.compile(skip_regex)

        dependency_links = []

        f = sys.stdout

        for dist in pkg_resources.working_set:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'))
        for link in find_links:
            if '#egg=' in link:
                dependency_links.append(link)
        for link in find_links:
            f.write('-f %s\n' % link)
        installations = {}
        for dist in get_installed_distributions(local_only=local_only):
            req = pip.FrozenRequirement.from_dist(dist,
                                                  dependency_links,
                                                  find_tags=find_tags)
            installations[req.name] = req
        if requirement:
            req_f = open(requirement)
            for line in req_f:
                if not line.strip() or line.strip().startswith('#'):
                    f.write(line)
                    continue
                if skip_match and skip_match.search(line):
                    f.write(line)
                    continue
                elif line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(
                        line, default_vcs=options.default_vcs)
                elif (line.startswith('-r') or line.startswith('--requirement')
                      or line.startswith('-Z')
                      or line.startswith('--always-unzip')
                      or line.startswith('-f') or line.startswith('-i')
                      or line.startswith('--extra-index-url')
                      or line.startswith('--find-links')
                      or line.startswith('--index-url')):
                    f.write(line)
                    continue
                else:
                    line_req = InstallRequirement.from_line(line)
                if not line_req.name:
                    logger.notify(
                        "Skipping line because it's not clear what it would install: %s"
                        % line.strip())
                    logger.notify(
                        "  (add #egg=PackageName to the URL to avoid this warning)"
                    )
                    continue
                if line_req.name not in installations:
                    logger.warn(
                        "Requirement file contains %s, but that package is not installed"
                        % line.strip())
                    continue
                f.write(str(installations[line_req.name]))
                del installations[line_req.name]
            f.write(
                '## The following requirements were added by pip --freeze:\n')
        for installation in sorted(installations.values(),
                                   key=lambda x: x.name):
            f.write(str(installation))
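
The skip_requirements_regex handling above is easy to miss: any requirement-file line matching the compiled pattern is written through verbatim instead of being re-frozen. A standalone sketch of that filter (the file contents and pattern here are made up):

import re

skip_match = re.compile(r'^local-')
lines = ['# pinned by ops\n', 'local-widget==0.9\n', 'requests==2.0.0\n']
for line in lines:
    if not line.strip() or line.strip().startswith('#'):
        continue  # blank lines and comments pass through untouched
    if skip_match.search(line):
        continue  # matches the skip pattern: echoed verbatim, not re-frozen
    print('would freeze: %s' % line.strip())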
Example No. 40
    def find_requirement(self, req, upgrade):
        def mkurl_pypi_url(url):
            loc = posixpath.join(url, url_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        url_name = req.url_name
        # Only check main index if index URL is given:
        main_index_url = None
        if self.index_urls:
            # Check that we have the url_name correctly spelled:
            main_index_url = Link(mkurl_pypi_url(self.index_urls[0]),
                                  trusted=True)
            # This will also cache the page, so it's okay that we get it again later:
            page = self._get_page(main_index_url, req)
            if page is None:
                url_name = self._find_url_name(
                    Link(self.index_urls[0], trusted=True), url_name,
                    req) or req.url_name

        if url_name is not None:
            locations = [mkurl_pypi_url(url)
                         for url in self.index_urls] + self.find_links
        else:
            locations = list(self.find_links)
        for version in req.absolute_versions:
            if url_name is not None and main_index_url is not None:
                locations = [posixpath.join(main_index_url.url, version)
                             ] + locations

        file_locations, url_locations = self._sort_locations(locations)
        _flocations, _ulocations = self._sort_locations(self.dependency_links)
        file_locations.extend(_flocations)

        # We trust every URL that the user has given us, whether it was
        #   given via --index-url or --find-links
        locations = [Link(url, trusted=True) for url in url_locations]

        # We explicitly do not trust links that came from dependency_links
        locations.extend([Link(url) for url in _ulocations])

        logger.debug('URLs to search for versions for %s:' % req)
        for location in locations:
            logger.debug('* %s' % location)

            # Determine if this url used a secure transport mechanism
            parsed = urlparse.urlparse(str(location))
            if parsed.scheme in INSECURE_SCHEMES:
                secure_schemes = INSECURE_SCHEMES[parsed.scheme]

                if len(secure_schemes) == 1:
                    ctx = (location, parsed.scheme, secure_schemes[0],
                           parsed.netloc)
                    logger.warn("%s uses an insecure transport scheme (%s). "
                                "Consider using %s if %s has it available" %
                                ctx)
                elif len(secure_schemes) > 1:
                    ctx = (location, parsed.scheme, ", ".join(secure_schemes),
                           parsed.netloc)
                    logger.warn("%s uses an insecure transport scheme (%s). "
                                "Consider using one of %s if %s has any of "
                                "them available" % ctx)
                else:
                    ctx = (location, parsed.scheme)
                    logger.warn("%s uses an insecure transport scheme (%s)." %
                                ctx)

        found_versions = []
        found_versions.extend(
            self._package_versions(
                # We trust every directly linked archive in find_links
                [Link(url, '-f', trusted=True) for url in self.find_links],
                req.name.lower()))
        page_versions = []
        for page in self._get_pages(locations, req):
            logger.debug('Analyzing links from page %s' % page.url)
            logger.indent += 2
            try:
                page_versions.extend(
                    self._package_versions(page.links, req.name.lower()))
            finally:
                logger.indent -= 2
        dependency_versions = list(
            self._package_versions(
                [Link(url) for url in self.dependency_links],
                req.name.lower()))
        if dependency_versions:
            logger.info('dependency_links found: %s' % ', '.join(
                [link.url for parsed, link, version in dependency_versions]))
        file_versions = list(
            self._package_versions([Link(url) for url in file_locations],
                                   req.name.lower()))
        if not found_versions and not page_versions and not dependency_versions and not file_versions:
            logger.fatal(
                'Could not find any downloads that satisfy the requirement %s'
                % req)

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external %s to allow)." % req.name)

            if self.need_warn_unverified:
                logger.warn("Some insecure and unverifiable files were ignored"
                            " (use --allow-unverified %s to allow)." %
                            req.name)

            raise DistributionNotFound('No distributions at all found for %s' %
                                       req)
        installed_version = []
        if req.satisfied_by is not None:
            installed_version = [(req.satisfied_by.parsed_version,
                                  INSTALLED_VERSION, req.satisfied_by.version)]
        if file_versions:
            file_versions.sort(reverse=True)
            logger.info('Local files found: %s' % ', '.join([
                url_to_path(link.url)
                for parsed, link, version in file_versions
            ]))
        # This is an intentional priority ordering.
        all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
        applicable_versions = []
        for (parsed_version, link, version) in all_versions:
            if version not in req.req:
                logger.info("Ignoring link %s, version %s doesn't match %s" %
                            (link, version, ','.join(
                                [''.join(s) for s in req.req.specs])))
                continue
            elif is_prerelease(version) and not (self.allow_all_prereleases
                                                 or req.prereleases):
                # If this version isn't the already installed one, then
                #   ignore it if it's a pre-release.
                if link is not INSTALLED_VERSION:
                    logger.info(
                        "Ignoring link %s, version %s is a pre-release (use --pre to allow)."
                        % (link, version))
                    continue
            applicable_versions.append((parsed_version, link, version))
        applicable_versions = self._sort_versions(applicable_versions)
        existing_applicable = bool([
            link for parsed_version, link, version in applicable_versions
            if link is INSTALLED_VERSION
        ])
        if not upgrade and existing_applicable:
            if applicable_versions[0][1] is INSTALLED_VERSION:
                logger.info(
                    'Existing installed version (%s) is most up-to-date and satisfies requirement'
                    % req.satisfied_by.version)
            else:
                logger.info(
                    'Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
                    % (req.satisfied_by.version, applicable_versions[0][2]))
            return None
        if not applicable_versions:
            logger.fatal(
                'Could not find a version that satisfies the requirement %s (from versions: %s)'
                % (req, ', '.join([
                    version for parsed_version, link, version in all_versions
                ])))

            if self.need_warn_external:
                logger.warn("Some externally hosted files were ignored (use "
                            "--allow-external to allow).")

            if self.need_warn_unverified:
                logger.warn("Some insecure and unverifiable files were ignored"
                            " (use --allow-unverified %s to allow)." %
                            req.name)

            raise DistributionNotFound(
                'No distributions matching the version for %s' % req)
        if applicable_versions[0][1] is INSTALLED_VERSION:
            # We have an existing version, and it's the best version
            logger.info(
                'Installed version (%s) is most up-to-date (past versions: %s)'
                % (req.satisfied_by.version, ', '.join([
                    version for parsed_version, link, version in
                    applicable_versions[1:]
                ]) or 'none'))
            raise BestVersionAlreadyInstalled
        if len(applicable_versions) > 1:
            logger.info(
                'Using version %s (newest of versions: %s)' %
                (applicable_versions[0][2], ', '.join([
                    version
                    for parsed_version, link, version in applicable_versions
                ])))

        selected_version = applicable_versions[0][1]

        if (selected_version.internal is not None
                and not selected_version.internal):
            logger.warn("%s an externally hosted file and may be "
                        "unreliable" % req.name)

        if (selected_version.verifiable is not None
                and not selected_version.verifiable):
            logger.warn("%s is potentially insecure and "
                        "unverifiable." % req.name)

        if selected_version._deprecated_regex:
            logger.deprecated(
                "1.7", "%s discovered using a deprecated method of parsing, "
                "in the future it will no longer be discovered" % req.name)

        return selected_version
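
The insecure-transport warning in this variant hinges on an INSECURE_SCHEMES mapping from a scheme to its secure replacements. A self-contained sketch of that check (the mapping shown is an assumption modelled on the code above, not pip's actual table):

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse  # Python 2

INSECURE_SCHEMES = {'http': ['https']}  # assumed shape, for illustration

location = 'http://example.com/simple/'
parsed = urlparse(location)
if parsed.scheme in INSECURE_SCHEMES:
    secure_schemes = INSECURE_SCHEMES[parsed.scheme]
    print('%s uses an insecure transport scheme (%s). Consider using %s '
          'if %s has it available' % (location, parsed.scheme,
                                      secure_schemes[0], parsed.netloc))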
Example No. 41
    def run(self, options, args):
        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages are not visible in this virtualenv."
                )
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(
                    options.target_dir):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated(
                "1.7", "--use-mirrors has been deprecated and will be removed"
                " in the future. Explicit uses of --index-url and/or "
                "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated(
                "1.7", "--mirrors has been deprecated and will be removed in "
                " the future. Explicit uses of --index-url and/or "
                "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(
                    name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename,
                                          finder=finder,
                                          options=options,
                                          session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder,
                                              force_root_egg_info=self.bundle,
                                              bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options,
                                        global_options,
                                        root=options.root_path)
                installed = ' '.join([
                    req.name for req in requirement_set.successfully_installed
                ])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install)
                                           or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(os.path.join(lib_dir, item),
                            os.path.join(options.target_dir, item))
            shutil.rmtree(temp_target_dir)
        return requirement_set
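
The --target handling at the end installs into a temporary --home directory and then relocates the purelib contents into the requested target. Stripped of pip's option plumbing, the move step amounts to something like this sketch (directory names hypothetical):

import os
import shutil


def relocate(lib_dir, target_dir):
    # Move everything installed under lib_dir into target_dir.
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    for item in os.listdir(lib_dir):
        shutil.move(os.path.join(lib_dir, item),
                    os.path.join(target_dir, item))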
Example No. 42
def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=logger.DEBUG, command_desc=None,
                    extra_environ=None):
    if command_desc is None:
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception:
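        # sys.exc_info() keeps this compatible with Python versions
        # that predate the "except ... as e" syntax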
        e = sys.exc_info()[1]
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        stdout = proc.stdout
        while 1:
            line = console_to_str(stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                logger.notify('Complete output from command %s:' % command_desc)
                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s in %s"
                % (command_desc, proc.returncode, cwd))
        else:
            logger.warn(
                "Command %s had error code %s in %s"
                % (command_desc, proc.returncode, cwd))
    if stdout is not None:
        return ''.join(all_output)
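
Underneath the logging and filtering, call_subprocess is a thin wrapper over subprocess.Popen with stderr folded into stdout. The capture path reduces to plain stdlib calls (a sketch, not pip's API; assumes a POSIX 'echo' binary):

import subprocess

proc = subprocess.Popen(['echo', 'hello'], stdin=None,
                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = proc.communicate()
if proc.returncode:
    raise RuntimeError('Command failed with code %s' % proc.returncode)
print(output.decode())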
Example No. 43
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that on Windows, any execute changes using os.chmod are
    no-ops per the Python docs.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif (filename.lower().endswith('.bz2')
            or filename.lower().endswith('.tbz')):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warn('Cannot determine compression type for file %s' % filename)
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # Note: Python <= 2.5 doesn't seem to know about pax headers; filter them out.
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            elif member.issym():
                try:
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, exc))
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, exc))
                    continue
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                destfp = open(path, 'wb')
                try:
                    shutil.copyfileobj(fp, destfp)
                finally:
                    destfp.close()
                fp.close()
                # does the member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
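
current_umask() is used above but not shown. The standard idiom for reading the process umask is to set it and immediately restore it, which is a reasonable sketch of what that helper does:

import os


def current_umask():
    """Return the current umask without permanently changing it."""
    mask = os.umask(0)
    os.umask(mask)
    return mask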
Example No. 44
def get_src_requirement(dist, location, find_tags):
    version_control = vcs.get_backend_from_location(location)
    if version_control:
        return version_control().get_src_requirement(dist, location, find_tags)
    logger.warn(
        'cannot determine version of editable source in %s '
        '(is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)'
        % location)
    return dist.as_requirement()
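
vcs.get_backend_from_location isn't shown here; a plausible sketch of that kind of dispatch probes for each backend's administrative directory (the mapping below is illustrative, not pip's implementation):

import os

ADMIN_DIRS = {'.svn': 'svn', '.git': 'git', '.hg': 'hg', '.bzr': 'bzr'}


def guess_backend(location):
    for admin_dir, name in ADMIN_DIRS.items():
        if os.path.exists(os.path.join(location, admin_dir)):
            return name
    return None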
Example No. 45
def unpack_http_url(link, location, download_dir=None, session=None):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'")

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    temp_location = None
    target_url = link.url.split('#', 1)[0]

    download_hash = None

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded = None
    if download_dir:
        already_downloaded = os.path.join(download_dir, link.filename)
        if not os.path.exists(already_downloaded):
            already_downloaded = None

    # If already downloaded, does its hash match?
    if already_downloaded:
        temp_location = already_downloaded
        content_type = mimetypes.guess_type(already_downloaded)[0]
        logger.notify('File was already downloaded %s' % already_downloaded)
        if link.hash:
            download_hash = _get_hash_from_file(temp_location, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn('Previously-downloaded file %s has bad hash, '
                            're-downloading.' % temp_location)
                temp_location = None
                os.unlink(already_downloaded)
                already_downloaded = None

    # let's download to a tmp dir
    if not temp_location:
        try:
            resp = session.get(
                target_url,
                # We use Accept-Encoding: identity here because requests
                # defaults to accepting compressed responses. This breaks in
                # a variety of ways depending on how the server is configured.
                # - Some servers will notice that the file isn't a compressible
                #   file and will leave the file alone and with an empty
                #   Content-Encoding
                # - Some servers will notice that the file is already
                #   compressed and will leave the file alone and will add a
                #   Content-Encoding: gzip header
                # - Some servers won't notice anything at all and will take
                #   a file that's already been compressed and compress it again
                #   and set the Content-Encoding: gzip header
                # By setting this to request only the identity encoding we're
                # hoping to eliminate the third case. Hopefully there is no
                # server which, when given a file, will notice that it is
                # already compressed and that you're not asking for a
                # compressed file, and will then decompress it before
                # sending; if such a server exists, it will probably never
                # be possible to make this work.
                headers={"Accept-Encoding": "identity"},
                stream=True,
            )
            resp.raise_for_status()
        except requests.HTTPError as exc:
            logger.fatal("HTTP error %s while getting %s" %
                         (exc.response.status_code, link))
            raise

        content_type = resp.headers.get('content-type', '')
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.headers.get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty" value
            # from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != resp.url:
            ext = os.path.splitext(resp.url)[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.hash and link.hash_name:
            _check_hash(download_hash, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded:
        _copy_file(temp_location, download_dir, content_type, link)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(temp_location, location, content_type, link)

    if not already_downloaded:
        os.unlink(temp_location)

    os.rmdir(temp_dir)
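
The filename guess above prefers the Content-Disposition header over the URL's basename; cgi.parse_header (standard library at the time) does the parsing:

import cgi

header = 'attachment; filename="example-1.0.tar.gz"'
disposition, params = cgi.parse_header(header)
print(params.get('filename'))  # -> example-1.0.tar.gz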
Example No. 46
def unpack_http_url(link, location, download_cache, download_dir=None):
    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    temp_location = None
    target_url = link.url.split('#', 1)[0]

    already_cached = False
    cache_file = None
    cache_content_type_file = None
    download_hash = None
    if download_cache:
        cache_file = os.path.join(download_cache, urllib.quote(target_url, ''))
        cache_content_type_file = cache_file + '.content-type'
        already_cached = (os.path.exists(cache_file)
                          and os.path.exists(cache_content_type_file))
        if not os.path.isdir(download_cache):
            create_download_cache_folder(download_cache)

    already_downloaded = None
    if download_dir:
        already_downloaded = os.path.join(download_dir, link.filename)
        if not os.path.exists(already_downloaded):
            already_downloaded = None

    if already_downloaded:
        temp_location = already_downloaded
        content_type = mimetypes.guess_type(already_downloaded)[0]
        logger.notify('File was already downloaded %s' % already_downloaded)
        if link.hash:
            download_hash = _get_hash_from_file(temp_location, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn('Previously-downloaded file %s has bad hash, '
                            're-downloading.' % temp_location)
                temp_location = None
                os.unlink(already_downloaded)
                already_downloaded = None

    # We have a cached file, and we haven't already found a good downloaded copy
    if already_cached and not temp_location:
        with open(cache_content_type_file) as fp:
            content_type = fp.read().strip()
        temp_location = cache_file
        logger.notify('Using download cache from %s' % cache_file)
        if link.hash and link.hash_name:
            download_hash = _get_hash_from_file(cache_file, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warn('Cached file %s has bad hash, '
                            're-downloading.' % temp_location)
                temp_location = None
                os.unlink(cache_file)
                os.unlink(cache_content_type_file)
                already_cached = False

    # We don't have either a cached or a downloaded copy
    if not temp_location:
        resp = _get_response_from_url(target_url, link)
        content_type = resp.info().get('content-type', '')
        filename = link.filename  # fallback
        # Have a look at the Content-Disposition header for a better guess
        content_disposition = resp.info().get('content-disposition')
        if content_disposition:
            type, params = cgi.parse_header(content_disposition)
            # We use ``or`` here because we don't want to use an "empty" value
            # from the filename param.
            filename = params.get('filename') or filename
        ext = splitext(filename)[1]
        if not ext:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                filename += ext
        if not ext and link.url != geturl(resp):
            ext = os.path.splitext(geturl(resp))[1]
            if ext:
                filename += ext
        temp_location = os.path.join(temp_dir, filename)
        download_hash = _download_url(resp, link, temp_location)
        if link.hash and link.hash_name:
            _check_hash(download_hash, link)

    if download_dir and not already_downloaded:
        _copy_file(temp_location, download_dir, content_type, link)
    unpack_file(temp_location, location, content_type, link)
    if cache_file and not already_cached:
        cache_download(cache_file, temp_location, content_type)
    if not (already_cached or already_downloaded):
        os.unlink(temp_location)
    os.rmdir(temp_dir)

def _download_url(resp, link, temp_location):
    fp = open(temp_location, 'wb')
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warn("Unsupported hash name %s for package %s" %
                        (link.hash_name, link))
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0
    downloaded = 0
    show_progress = total_length > 40 * 1000 or not total_length
    show_url = link.show_url
    try:
        if show_progress:
            ## FIXME: the URL can get really long in this message:
            if total_length:
                logger.start_progress('Downloading %s (%s): ' %
                                      (show_url, format_size(total_length)))
            else:
                logger.start_progress('Downloading %s (unknown size): ' %
                                      show_url)
        else:
            logger.notify('Downloading %s' % show_url)
        logger.info('Downloading from URL %s' % link)

        def resp_read(chunk_size):
            try:
                # Special case for urllib3.
                try:
                    for chunk in resp.raw.stream(chunk_size,
                                                 decode_content=False):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = resp.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

        for chunk in resp_read(4096):
            downloaded += len(chunk)
            if show_progress:
                if not total_length:
                    logger.show_progress('%s' % format_size(downloaded))
                else:
                    logger.show_progress('%3i%%  %s' %
                                         (100 * downloaded / total_length,
                                          format_size(downloaded)))
            if download_hash is not None:
                download_hash.update(chunk)
            fp.write(chunk)
        fp.close()
    finally:
        if show_progress:
            logger.end_progress('%s downloaded' % format_size(downloaded))
    return download_hash
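
The incremental hashing in _download_url reduces to hashlib.new plus a per-chunk update; a compact illustration, with 'sha256' standing in for link.hash_name:

import hashlib

download_hash = hashlib.new('sha256')
for chunk in (b'some data', b'more data'):
    download_hash.update(chunk)
print(download_hash.hexdigest())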