def correct_build_location(self):
     """If the build location was a temporary directory, this will move it
     to a new more permanent location"""
     if self.source_dir is not None:
         return
     assert self.req is not None
     assert self._temp_build_dir
     old_location = self._temp_build_dir
     new_build_dir = self._ideal_build_dir
     del self._ideal_build_dir
     if self.editable:
         name = self.name.lower()
     else:
         name = self.name
     new_location = os.path.join(new_build_dir, name)
     if not os.path.exists(new_build_dir):
         logger.debug('Creating directory %s', new_build_dir)
         _make_build_dir(new_build_dir)
     if os.path.exists(new_location):
         raise InstallationError(
             'A package already exists in %s; please remove it to continue'
             % display_path(new_location))
     logger.debug(
         'Moving package %s from %s to new location %s',
         self, display_path(old_location), display_path(new_location),
     )
     shutil.move(old_location, new_location)
     self._temp_build_dir = new_location
     self.source_dir = new_location
     self._egg_info_path = None
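
All of these examples format paths for log output with display_path. As a minimal sketch (an assumption, not necessarily pip's exact helper), such a function can be written as:

import os

def display_path(path):
    """Show a path relative to the current directory when possible."""
    path = os.path.normcase(os.path.abspath(path))
    if path.startswith(os.getcwd() + os.sep):
        path = '.' + path[len(os.getcwd()):]
    return path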
Example #2
 def remove_filename_from_pth(self, filename):
     for pth in self.pth_files():
         with open(pth, 'r') as f:
             lines = f.readlines()
         new_lines = [
             l for l in lines if l.strip() != filename]
         if lines != new_lines:
             logger.debug(
                 'Removing reference to %s from .pth file %s',
                 display_path(filename),
                 display_path(pth),
             )
             if not [line for line in new_lines if line]:
                 logger.debug(
                     '%s file would be empty: deleting', display_path(pth)
                 )
                 if not self.simulate:
                     os.unlink(pth)
             else:
                 if not self.simulate:
                     with open(pth, 'wb') as f:
                         f.writelines(new_lines)
             return
     logger.warning(
         'Cannot find a reference to %s in any .pth file',
         display_path(filename),
     )
 def get_info(self, location):
     """Returns (url, revision), where both are strings"""
     assert not location.rstrip('/').endswith(self.dirname), \
         'Bad directory: %s' % location
     output = self.run_command(
         ['info', location],
         show_stdout=False,
         extra_environ={'LANG': 'C'},
     )
     match = _svn_url_re.search(output)
     if not match:
         logger.warning(
             'Cannot determine URL of svn checkout %s',
             display_path(location),
         )
         logger.debug('Output that cannot be parsed: \n%s', output)
         return None, None
     url = match.group(1).strip()
     match = _svn_revision_re.search(output)
     if not match:
         logger.warning(
             'Cannot determine revision of svn checkout %s',
             display_path(location),
         )
         logger.debug('Output that cannot be parsed: \n%s', output)
         return url, None
     return url, match.group(1)
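
get_info() above depends on two module-level patterns that this excerpt does not show. A minimal sketch of definitions consistent with `svn info` output in the C locale (an assumption, not necessarily the real ones):

import re

_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')

sample = 'Path: .\nURL: https://svn.example.org/repo/trunk\nRevision: 42\n'
print(_svn_url_re.search(sample).group(1))       # https://svn.example.org/repo/trunk
print(_svn_revision_re.search(sample).group(1))  # 42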
Example #4
    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir
        assert self._ideal_build_dir
        old_location = self._temp_build_dir
        self._temp_build_dir = None
        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self._ideal_build_dir = None
        self.source_dir = new_location
        self._egg_info_path = None
def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
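
_copy_file() relies on a backup_dir() helper to pick a backup name. A minimal sketch consistent with how it is used here (an assumption, not necessarily pip's exact code):

import os

def backup_dir(path, ext='.bak'):
    """Return '<path>.bak', '<path>.bak2', ... choosing a name that does not exist yet."""
    n = 1
    extension = ext
    while os.path.exists(path + extension):
        n += 1
        extension = ext + str(n)
    return path + extension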
Example #6
    def list(self, options, args):
        if args:
            raise InstallationError(
                'You cannot give an argument with --list')
        for path in sorted(self.paths()):
            if not os.path.exists(path):
                continue
            basename = os.path.basename(path.rstrip(os.path.sep))
            if os.path.isfile(path) and zipfile.is_zipfile(path):
                if os.path.dirname(path) not in self.paths():
                    logger.info('Zipped egg: %s', display_path(path))
                continue
            if (basename != 'site-packages'
                    and basename != 'dist-packages'
                    and not path.replace('\\', '/').endswith('lib/python')):
                continue
            logger.info('In %s:', display_path(path))

            with indent_log():
                zipped = []
                unzipped = []

                for filename in sorted(os.listdir(path)):
                    ext = os.path.splitext(filename)[1].lower()
                    if ext in ('.pth', '.egg-info', '.egg-link'):
                        continue
                    if ext == '.py':
                        logger.debug(
                            'Not displaying %s: not a package',
                            display_path(filename)
                        )
                        continue
                    full = os.path.join(path, filename)
                    if os.path.isdir(full):
                        unzipped.append((filename, self.count_package(full)))
                    elif zipfile.is_zipfile(full):
                        zipped.append(filename)
                    else:
                        logger.debug(
                            'Unknown file: %s', display_path(filename),
                        )
                if zipped:
                    logger.info('Zipped packages:')
                    with indent_log():
                        for filename in zipped:
                            logger.info(filename)
                else:
                    logger.info('No zipped packages.')
                if unzipped:
                    if options.sort_files:
                        unzipped.sort(key=lambda x: -x[1])
                    logger.info('Unzipped packages:')
                    with indent_log():
                        for filename, count in unzipped:
                            logger.info('%s  (%i files)', filename, count)
                else:
                    logger.info('No unzipped packages.')
Example #7
    def zip_package(self, module_name, filename, no_pyc):
        logger.info('Zip %s (in %s)', module_name, display_path(filename))

        orig_filename = filename
        if filename.endswith('.egg'):
            dest_filename = filename
        else:
            dest_filename = filename + '.zip'

        with indent_log():
            # FIXME: I think this needs to be undoable:
            if filename == dest_filename:
                filename = backup_dir(orig_filename)
                logger.info(
                    'Moving %s aside to %s', orig_filename, filename,
                )
                if not self.simulate:
                    shutil.move(orig_filename, filename)
            try:
                logger.debug(
                    'Creating zip file in %s', display_path(dest_filename),
                )
                if not self.simulate:
                    zip = zipfile.ZipFile(dest_filename, 'w')
                    zip.writestr(module_name + '/', '')
                    for dirpath, dirnames, filenames in os.walk(filename):
                        if no_pyc:
                            filenames = [f for f in filenames
                                         if not f.lower().endswith('.pyc')]
                        for fns, is_dir in [
                                (dirnames, True), (filenames, False)]:
                            for fn in fns:
                                full = os.path.join(dirpath, fn)
                                dest = os.path.join(
                                    module_name,
                                    dirpath[len(filename):].lstrip(
                                        os.path.sep
                                    ),
                                    fn,
                                )
                                if is_dir:
                                    zip.writestr(dest + '/', '')
                                else:
                                    zip.write(full, dest)
                    zip.close()
                logger.debug(
                    'Removing old directory %s', display_path(filename),
                )
                if not self.simulate:
                    rmtree(filename)
            except:
                # FIXME: need to do an undo here
                raise
            # FIXME: should also be undone:
            self.add_filename_to_pth(dest_filename)
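
zip_package() stores explicit directory entries by writing an empty string under a name ending in '/'. A small self-contained illustration of that zipfile pattern (the file name demo.zip is arbitrary):

import zipfile

with zipfile.ZipFile('demo.zip', 'w') as zf:
    zf.writestr('pkg/', '')             # directory entry, as zip.writestr(dest + '/', '') above
    zf.writestr('pkg/__init__.py', '')  # regular file entry

print(zipfile.ZipFile('demo.zip').namelist())  # ['pkg/', 'pkg/__init__.py']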
Example #8
    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.check_version(dest, rev_options):
                    self.run_command(
                        ['fetch', '-q', url] + rev_options,
                        cwd=dest,
                    )
                    self.run_command(
                        ['checkout', '-q', 'FETCH_HEAD'],
                        cwd=dest,
                    )

            #: repo may contain submodules
            self.update_submodules(dest)
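
obtain() consumes the (url, rev) pair returned by get_url_rev(). As a purely hypothetical illustration of that shape (not pip's get_url_rev, which handles real VCS URLs more carefully):

def split_url_rev(url):
    """Split 'https://host/repo.git@v1.2' into ('https://host/repo.git', 'v1.2')."""
    if '@' in url:
        base, _, rev = url.rpartition('@')
        return base, rev or None
    return url, None

print(split_url_rev('https://example.com/repo.git@v1.2'))  # ('https://example.com/repo.git', 'v1.2')
print(split_url_rev('https://example.com/repo.git'))       # ('https://example.com/repo.git', None)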
Example #9
 def pkg_info(self):
     p = FeedParser()
     data = self.egg_info_data("PKG-INFO")
     if not data:
         logger.warning("No PKG-INFO file found in %s", display_path(self.egg_info_path("PKG-INFO")))
     p.feed(data or "")
     return p.close()
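
pkg_info() feeds the PKG-INFO text into a FeedParser and returns the resulting message object. A minimal illustration using the standard library's email.parser (pip may use a vendored equivalent):

from email.parser import FeedParser

p = FeedParser()
p.feed('Metadata-Version: 1.1\nName: example\nVersion: 1.2.3\n')
msg = p.close()
print(msg['Name'], msg['Version'])  # example 1.2.3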
Example #10
    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        "%s in %s exists, and has correct URL (%s)", self.repo_name.title(), display_path(dest), url
                    )
                    logger.info("Updating %s %s%s", display_path(dest), self.repo_name, rev_display)
                    self.update(dest, rev_options)
                else:
                    logger.warning(
                        "%s %s in %s exists with URL %s", self.name, self.repo_name, display_path(dest), existing_url
                    )
                    prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
            else:
                logger.warning("Directory %s already exists, and is not a %s %s.", dest, self.name, self.repo_name)
                prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b"))
        if prompt:
            logger.warning("The plan is to install the %s repository %s", self.name, url)
            response = ask_path_exists("What to do?  %s" % prompt[0], prompt[1])

            if response == "s":
                logger.info("Switching %s %s to %s%s", self.repo_name, display_path(dest), url, rev_display)
                self.switch(dest, url, rev_options)
            elif response == "i":
                # do nothing
                pass
            elif response == "w":
                logger.warning("Deleting %s", display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == "b":
                dest_dir = backup_dir(dest)
                logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout
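
check_destination() builds its prompt as a (message, allowed_answers) pair for ask_path_exists. A hypothetical stand-in for that kind of prompt helper (not pip's implementation):

def ask(message, options):
    """Keep asking until the answer is one of the allowed options."""
    while True:
        response = input(message).strip().lower()
        if response in options:
            return response
        print('Please enter one of: %s' % ', '.join(options))

# e.g. ask('What to do?  (s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))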
Example #11
 def archive(self, build_dir):
     assert self.source_dir
     create_archive = True
     archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
     archive_path = os.path.join(build_dir, archive_name)
     if os.path.exists(archive_path):
         response = ask_path_exists(
             'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
             display_path(archive_path), ('i', 'w', 'b', 'a'))
         if response == 'i':
             create_archive = False
         elif response == 'w':
             logger.warning('Deleting %s', display_path(archive_path))
             os.remove(archive_path)
         elif response == 'b':
             dest_file = backup_dir(archive_path)
             logger.warning(
                 'Backing up %s to %s',
                 display_path(archive_path),
                 display_path(dest_file),
             )
             shutil.move(archive_path, dest_file)
         elif response == 'a':
             sys.exit(-1)
     if create_archive:
         zip = zipfile.ZipFile(
             archive_path, 'w', zipfile.ZIP_DEFLATED,
             allowZip64=True
         )
         dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
         for dirpath, dirnames, filenames in os.walk(dir):
             if 'pip-egg-info' in dirnames:
                 dirnames.remove('pip-egg-info')
             for dirname in dirnames:
                 dirname = os.path.join(dirpath, dirname)
                 name = self._clean_zip_name(dirname, dir)
                 zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                 zipdir.external_attr = 0x1ED << 16  # 0o755
                 zip.writestr(zipdir, '')
             for filename in filenames:
                 if filename == PIP_DELETE_MARKER_FILENAME:
                     continue
                 filename = os.path.join(dirpath, filename)
                 name = self._clean_zip_name(filename, dir)
                 zip.write(filename, self.name + '/' + name)
         zip.close()
         logger.info('Saved %s', display_path(archive_path))
 def pkg_info(self):
     p = FeedParser()
     data = self.egg_info_data('PKG-INFO')
     if not data:
         logger.warning(
             'No PKG-INFO file found in %s',
             display_path(self.egg_info_path('PKG-INFO')),
         )
     p.feed(data or '')
     return p.close()
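
archive() above uses self._clean_zip_name to turn absolute paths into archive member names. A minimal standalone sketch of that helper, consistent with its use here (an assumption, not necessarily pip's exact method):

import os

def clean_zip_name(name, prefix):
    """Strip the leading prefix from name and normalize separators to '/'."""
    assert name.startswith(prefix + os.path.sep)
    return name[len(prefix) + 1:].replace(os.path.sep, '/')

print(clean_zip_name(os.path.join('/build/pkg', 'src', 'mod.py'), '/build/pkg'))  # src/mod.py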
Example #13
def _copy_file(filename, location, content_type, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            "The file %s exists. (i)gnore, (w)ipe, (b)ackup " % display_path(download_location), ("i", "w", "b")
        )
        if response == "i":
            copy = False
        elif response == "w":
            logger.warning("Deleting %s", display_path(download_location))
            os.remove(download_location)
        elif response == "b":
            dest_file = backup_dir(download_location)
            logger.warning("Backing up %s to %s", display_path(download_location), display_path(dest_file))
            shutil.move(download_location, dest_file)
    if copy:
        shutil.copy(filename, download_location)
        logger.info("Saved %s", display_path(download_location))
Example #14
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     rev_options = get_rev_options(url, rev)
     if rev:
         rev_display = " (to revision %s)" % rev
     else:
         rev_display = ""
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info("Checking out %s%s to %s", url, rev_display, display_path(dest))
         self.run_command(["checkout", "-q"] + rev_options + [url, dest])
Example #15
 def is_download(self):
     if self.download_dir:
         self.download_dir = expanduser(self.download_dir)
         if os.path.exists(self.download_dir):
             return True
         else:
             logger.critical('Could not find download directory')
             raise InstallationError(
                 "Could not find or access download directory '%s'"
                 % display_path(self.download_dir))
     return False
Example #16
 def assert_source_matches_version(self):
     assert self.source_dir
     version = self.pkg_info()["version"]
     if version not in self.req:
         logger.warning("Requested %s, but installing version %s", self, self.installed_version)
     else:
         logger.debug(
             "Source in %s has version %s, which satisfies requirement %s",
             display_path(self.source_dir),
             version,
             self,
         )
Example #17
 def list(self, options, args):
     if args:
         raise InstallationError(
             'You cannot give an argument with --list')
     for path in sorted(self.paths()):
         if not os.path.exists(path):
             continue
         basename = os.path.basename(path.rstrip(os.path.sep))
         if os.path.isfile(path) and zipfile.is_zipfile(path):
             if os.path.dirname(path) not in self.paths():
                 logger.info('Zipped egg: %s', display_path(path))
             continue
Example #18
 def archive(self, build_dir):
     assert self.source_dir
     create_archive = True
     archive_name = "%s-%s.zip" % (self.name, self.pkg_info()["version"])
     archive_path = os.path.join(build_dir, archive_name)
     if os.path.exists(archive_path):
         response = ask_path_exists(
             "The file %s exists. (i)gnore, (w)ipe, (b)ackup " % display_path(archive_path), ("i", "w", "b")
         )
         if response == "i":
             create_archive = False
         elif response == "w":
             logger.warning("Deleting %s", display_path(archive_path))
             os.remove(archive_path)
         elif response == "b":
             dest_file = backup_dir(archive_path)
             logger.warning("Backing up %s to %s", display_path(archive_path), display_path(dest_file))
             shutil.move(archive_path, dest_file)
     if create_archive:
         zip = zipfile.ZipFile(archive_path, "w", zipfile.ZIP_DEFLATED, allowZip64=True)
         dir = os.path.normcase(os.path.abspath(self.source_dir))
         for dirpath, dirnames, filenames in os.walk(dir):
             if "pip-egg-info" in dirnames:
                 dirnames.remove("pip-egg-info")
             for dirname in dirnames:
                 dirname = os.path.join(dirpath, dirname)
                 name = self._clean_zip_name(dirname, dir)
                 zipdir = zipfile.ZipInfo(self.name + "/" + name + "/")
                 zipdir.external_attr = 0x1ED << 16  # 0o755
                 zip.writestr(zipdir, "")
             for filename in filenames:
                 if filename == PIP_DELETE_MARKER_FILENAME:
                     continue
                 filename = os.path.join(dirpath, filename)
                 name = self._clean_zip_name(filename, dir)
                 zip.write(filename, self.name + "/" + name)
         zip.close()
         logger.info("Saved %s", display_path(archive_path))
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     rev_options = get_rev_options(url, rev)
     if rev:
         rev_display = ' (to revision %s)' % rev
     else:
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         self.run_command(['checkout', '-q'] + rev_options + [url, dest])
Example #20
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     if rev:
         rev_options = ['-r', rev]
         rev_display = ' (to revision %s)' % rev
     else:
         rev_options = []
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
Example #21
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     rev_options = get_rev_options(url, rev)
     if rev:
         rev_display = ' (to revision %s)' % rev
     else:
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         call_subprocess([self.cmd, 'checkout', '-q'] + rev_options +
                         [url, dest])
Example #22
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     rev_options = get_rev_options(url, rev)
     url = self.remove_auth_from_url(url)
     if rev:
         rev_display = ' (to revision %s)' % rev
     else:
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         self.run_command(['checkout', '-q'] + rev_options + [url, dest])
Example #23
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     if rev:
         rev_options = ['-r', rev]
         rev_display = ' (to revision %s)' % rev
     else:
         rev_options = []
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         self.run_command(['branch', '-q'] + rev_options + [url, dest])
Example #24
 def assert_source_matches_version(self):
     assert self.source_dir
     version = self.pkg_info()['version']
     if self.req.specifier and version not in self.req.specifier:
         logger.warning(
             'Requested %s, but installing version %s',
             self,
             self.installed_version,
         )
     else:
         logger.debug(
             'Source in %s has version %s, which satisfies requirement %s',
             display_path(self.source_dir),
             version,
             self,
         )
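
The check 'version not in self.req.specifier' is a containment test against the requirement's version specifier. A minimal illustration using the packaging project (which pip vendors; shown here as a plain import):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=1.0,<2.0')
print('1.5' in spec)  # True
print('2.1' in spec)  # False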
Example #25
 def assert_source_matches_version(self):
     assert self.source_dir
     version = self.pkg_info()['version']
     if self.req.specifier and version not in self.req.specifier:
         logger.warning(
             'Requested %s, but installing version %s',
             self,
             self.installed_version,
         )
     else:
         logger.debug(
             'Source in %s has version %s, which satisfies requirement %s',
             display_path(self.source_dir),
             version,
             self,
         )
Example #26
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     if rev:
         rev_options = [rev]
         rev_display = ' (to revision %s)' % rev
     else:
         rev_options = []
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Cloning hg %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         self.run_command(['clone', '--noupdate', '-q', url, dest])
         self.run_command(['update', '-q'] + rev_options, cwd=dest)
Example #27
 def __str__(self):
     if self.req:
         s = str(self.req)
         if self.link:
             s += ' from %s' % self.link.url
     else:
         s = self.link.url if self.link else None
     if self.satisfied_by is not None:
         s += ' in %s' % display_path(self.satisfied_by.location)
     if self.comes_from:
         if isinstance(self.comes_from, six.string_types):
             comes_from = self.comes_from
         else:
             comes_from = self.comes_from.from_path()
         if comes_from:
             s += ' (from %s)' % comes_from
     return s
Example #28
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     if rev:
         rev_options = ['-r', rev]
         rev_display = ' (to revision %s)' % rev
     else:
         rev_options = []
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Checking out %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         call_subprocess(
             [self.cmd, 'branch', '-q'] + rev_options + [url, dest])
Example #29
 def obtain(self, dest):
     url, rev = self.get_url_rev()
     if rev:
         rev_options = [rev]
         rev_display = ' (to revision %s)' % rev
     else:
         rev_options = []
         rev_display = ''
     if self.check_destination(dest, url, rev_options, rev_display):
         logger.info(
             'Cloning hg %s%s to %s',
             url,
             rev_display,
             display_path(dest),
         )
         self.run_command(['clone', '--noupdate', '-q', url, dest])
         self.run_command(['update', '-q'] + rev_options, cwd=dest)
 def __str__(self):
     if self.req:
         s = str(self.req)
         if self.url:
             s += ' from %s' % self.url
     else:
         s = self.url
     if self.satisfied_by is not None:
         s += ' in %s' % display_path(self.satisfied_by.location)
     if self.comes_from:
         if isinstance(self.comes_from, six.string_types):
             comes_from = self.comes_from
         else:
             comes_from = self.comes_from.from_path()
         if comes_from:
             s += ' (from %s)' % comes_from
     return s
Example #31
 def __str__(self):
     if self.req:
         s = str(self.req)
         if self.link:
             s += " from %s" % self.link.url
     else:
         s = self.link.url if self.link else None
     if self.satisfied_by is not None:
         s += " in %s" % display_path(self.satisfied_by.location)
     if self.comes_from:
         if isinstance(self.comes_from, six.string_types):
             comes_from = self.comes_from
         else:
             comes_from = self.comes_from.from_path()
         if comes_from:
             s += " (from %s)" % comes_from
     return s
Example #32
 def add_filename_to_pth(self, filename):
     path = os.path.dirname(filename)
     dest = filename + '.pth'
     if path not in self.paths():
         logger.warning(
             'Adding .pth file %s, but it is not on sys.path',
             display_path(dest),
         )
     if not self.simulate:
         if os.path.exists(dest):
             with open(dest) as f:
                 lines = f.readlines()
             if lines and not lines[-1].endswith('\n'):
                 lines[-1] += '\n'
             lines.append(filename + '\n')
         else:
             lines = [filename + '\n']
         with open(dest, 'wb') as f:
             f.writelines(lines)
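
The .pth file written by add_filename_to_pth is consumed by the standard site module, which adds each existing path listed in it to sys.path. A small self-contained sketch of that mechanism (the directory and file names are hypothetical):

import os
import site
import sys
import tempfile

d = tempfile.mkdtemp()
target = os.path.join(d, 'example.egg')  # stands in for the zipped egg path
os.mkdir(target)
with open(target + '.pth', 'w') as f:    # mirrors dest = filename + '.pth'
    f.write(target + '\n')

site.addsitedir(d)          # processes the .pth files found in d
print(target in sys.path)   # True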
Example #33
 def add_filename_to_pth(self, filename):
     path = os.path.dirname(filename)
     dest = filename + '.pth'
     if path not in self.paths():
         logger.warning(
             'Adding .pth file %s, but it is not on sys.path',
             display_path(dest),
         )
     if not self.simulate:
         if os.path.exists(dest):
             with open(dest) as f:
                 lines = f.readlines()
             if lines and not lines[-1].endswith('\n'):
                 lines[-1] += '\n'
             lines.append(filename + '\n')
         else:
             lines = [filename + '\n']
         with open(dest, 'wb') as f:
             f.writelines(lines)
Example #34
    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            call_subprocess([self.cmd, 'clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.get_revision(dest).startswith(rev_options[0]):
                    call_subprocess(
                        [self.cmd, 'checkout', '-q'] + rev_options,
                        cwd=dest,
                    )
            #: repo may contain submodules
            self.update_submodules(dest)
Example #35
    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.check_version(dest, rev_options):
                    self.run_command(
                        ['checkout', '-q'] + rev_options,
                        cwd=dest,
                    )
            #: repo may contain submodules
            self.update_submodules(dest)
Example #36
def _copy_file(filename, location, content_type, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
            display_path(download_location), ('i', 'w', 'b'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
Example #37
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, rev_options)
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
Example #38
    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        from pip.index import Link

        unnamed = list(self.unnamed_requirements)
        reqs = list(self.requirements.values())
        while reqs or unnamed:
            if unnamed:
                req_to_install = unnamed.pop(0)
            else:
                req_to_install = reqs.pop(0)
            install = True
            best_installed = False
            not_found = None

            # ############################################# #
            # # Search for archive to fulfill requirement # #
            # ############################################# #

            if not self.ignore_installed and not req_to_install.editable:
                req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade:
                        if not self.force_reinstall and not req_to_install.url:
                            try:
                                url = finder.find_requirement(
                                    req_to_install, self.upgrade)
                            except BestVersionAlreadyInstalled:
                                best_installed = True
                                install = False
                            except DistributionNotFound as exc:
                                not_found = exc
                            else:
                                # Avoid the need to call find_requirement again
                                req_to_install.url = url.url

                        if not best_installed:
                            # don't uninstall conflict if user install and
                            # conflict is not user install
                            if not (self.use_user_site
                                    and not dist_in_usersite(
                                        req_to_install.satisfied_by
                                    )):
                                req_to_install.conflicts_with = \
                                    req_to_install.satisfied_by
                            req_to_install.satisfied_by = None
                    else:
                        install = False
                if req_to_install.satisfied_by:
                    if best_installed:
                        logger.info(
                            'Requirement already up-to-date: %s',
                            req_to_install,
                        )
                    else:
                        logger.info(
                            'Requirement already satisfied (use --upgrade to '
                            'upgrade): %s',
                            req_to_install,
                        )
            if req_to_install.editable:
                logger.info('Obtaining %s', req_to_install)
            elif install:
                if (req_to_install.url
                        and req_to_install.url.lower().startswith('file:')):
                    path = url_to_path(req_to_install.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

            with indent_log():
                # ################################ #
                # # vcs update or unpack archive # #
                # ################################ #

                is_wheel = False
                if req_to_install.editable:
                    if req_to_install.source_dir is None:
                        location = req_to_install.build_location(self.src_dir)
                        req_to_install.source_dir = location
                    else:
                        location = req_to_install.source_dir
                    if not os.path.exists(self.build_dir):
                        _make_build_dir(self.build_dir)
                    req_to_install.update_editable(not self.is_download)
                    if self.is_download:
                        req_to_install.run_egg_info()
                        req_to_install.archive(self.download_dir)
                    else:
                        req_to_install.run_egg_info()
                elif install:
                    # @@ if filesystem packages are not marked
                    # editable in a req, a non deterministic error
                    # occurs when the script attempts to unpack the
                    # build directory

                    # NB: This call can result in the creation of a temporary
                    # build directory
                    location = req_to_install.build_location(
                        self.build_dir,
                    )
                    unpack = True
                    url = None

                    # If a checkout exists, it's unwise to keep going.  version
                    # inconsistencies are logged later, but do not fail the
                    # installation.
                    if os.path.exists(os.path.join(location, 'setup.py')):
                        raise PreviousBuildDirError(
                            "pip can't proceed with requirements '%s' due to a"
                            " pre-existing build directory (%s). This is "
                            "likely due to a previous installation that failed"
                            ". pip is being responsible and not assuming it "
                            "can delete this. Please delete it and try again."
                            % (req_to_install, location)
                        )
                    else:
                        # FIXME: this won't upgrade when there's an existing
                        # package unpacked in `location`
                        if req_to_install.url is None:
                            if not_found:
                                raise not_found
                            url = finder.find_requirement(
                                req_to_install,
                                upgrade=self.upgrade,
                            )
                        else:
                            # FIXME: should req_to_install.url already be a
                            # link?
                            url = Link(req_to_install.url)
                            assert url
                        if url:
                            try:

                                if (
                                    url.filename.endswith(wheel_ext)
                                    and self.wheel_download_dir
                                ):
                                    # when doing `pip wheel`
                                    download_dir = self.wheel_download_dir
                                    do_download = True
                                else:
                                    download_dir = self.download_dir
                                    do_download = self.is_download
                                unpack_url(
                                    url, location, download_dir,
                                    do_download, session=self.session,
                                )
                            except requests.HTTPError as exc:
                                logger.critical(
                                    'Could not install requirement %s because '
                                    'of error %s',
                                    req_to_install,
                                    exc,
                                )
                                raise InstallationError(
                                    'Could not install requirement %s because '
                                    'of HTTP error %s for URL %s' %
                                    (req_to_install, exc, url)
                                )
                        else:
                            unpack = False
                    if unpack:
                        is_wheel = url and url.filename.endswith(wheel_ext)
                        if self.is_download:
                            req_to_install.source_dir = location
                            if not is_wheel:
                                # FIXME:https://github.com/pypa/pip/issues/1112
                                req_to_install.run_egg_info()
                            if url and url.scheme in vcs.all_schemes:
                                req_to_install.archive(self.download_dir)
                        elif is_wheel:
                            req_to_install.source_dir = location
                            req_to_install.url = url.url
                        else:
                            req_to_install.source_dir = location
                            req_to_install.run_egg_info()
                            req_to_install.assert_source_matches_version()
                        # req_to_install.req is only avail after unpack for URL
                        # pkgs repeat check_if_exists to uninstall-on-upgrade
                        # (#14)
                        if not self.ignore_installed:
                            req_to_install.check_if_exists()
                        if req_to_install.satisfied_by:
                            if self.upgrade or self.ignore_installed:
                                # don't uninstall conflict if user install and
                                # conflict is not user install
                                if not (self.use_user_site
                                        and not dist_in_usersite(
                                            req_to_install.satisfied_by)):
                                    req_to_install.conflicts_with = \
                                        req_to_install.satisfied_by
                                req_to_install.satisfied_by = None
                            else:
                                logger.info(
                                    'Requirement already satisfied (use '
                                    '--upgrade to upgrade): %s',
                                    req_to_install,
                                )
                                install = False

                # ###################### #
                # # parse dependencies # #
                # ###################### #
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )

                if is_wheel:
                    dist = list(
                        pkg_resources.find_distributions(location)
                    )[0]
                else:  # sdists
                    if req_to_install.satisfied_by:
                        dist = req_to_install.satisfied_by
                    else:
                        dist = req_to_install.get_dist()
                    # FIXME: shouldn't be globally added:
                    if dist.has_metadata('dependency_links.txt'):
                        finder.add_dependency_links(
                            dist.get_metadata_lines('dependency_links.txt')
                        )

                if not self.ignore_dependencies:
                    for subreq in dist.requires(
                            req_to_install.extras):
                        if self.has_requirement(
                                subreq.project_name):
                            # FIXME: check for conflict
                            continue
                        subreq = InstallRequirement(
                            str(subreq),
                            req_to_install,
                            isolated=self.isolated,
                        )
                        reqs.append(subreq)
                        self.add_requirement(subreq)

                if not self.has_requirement(req_to_install.name):
                    # 'unnamed' requirements will get added here
                    self.add_requirement(req_to_install)

                # cleanup tmp src
                if (self.is_download or
                        req_to_install._temp_build_dir is not None):
                    self.reqs_to_cleanup.append(req_to_install)

                if install:
                    self.successfully_downloaded.append(req_to_install)
Example #39
    def _prepare_file(self, finder, req_to_install):
        """Prepare a single requirements files.

        :return: A list of addition InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r' %
                    (req_to_install.satisfied_by, ))
                logger.info('Requirement already %s: %s', skip_reason,
                            req_to_install)
            else:
                if (req_to_install.link
                        and req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
            elif req_to_install.satisfied_by:
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again." %
                        (req_to_install, req_to_install.source_dir))
                req_to_install.populate_link(finder, self.upgrade)
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing `pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(req_to_install.link,
                               req_to_install.source_dir,
                               download_dir,
                               autodelete_unpacked,
                               session=self.session)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link))
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not dist_in_usersite(
                                req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            more_reqs = []

            def add_req(subreq):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(
                    self.add_requirement(sub_install_req, req_to_install.name))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not self.ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras))
                for missing in missing_requested:
                    logger.warning('%s does not provide the extra \'%s\'',
                                   dist, missing)

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras))
                for subreq in dist.requires(available_requested):
                    add_req(subreq)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs
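
The dependency loop above expands dist.requires(...) into new InstallRequirement objects via add_req(). A minimal sketch of what requires() yields, assuming setuptools' pkg_resources and an installed 'requests' distribution (both assumptions):

import pkg_resources

dist = pkg_resources.get_distribution('requests')
for subreq in dist.requires(extras=()):
    print(subreq.project_name, str(subreq))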
Example #40
    def prepare_requirement(self, req_to_install, resolver, requirement_set):
        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not resolver.ignore_installed:
                skip_reason = resolver._check_skip_installed(req_to_install)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s (%s)', skip_reason,
                    req_to_install,
                    req_to_install.satisfied_by.version)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        assert resolver.require_hashes is not None, \
            "This should have been set in resolve()"

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if resolver.require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(requirement_set.src_dir)
                req_to_install.update_editable(not requirement_set.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if requirement_set.is_download:
                    req_to_install.archive(requirement_set.download_dir)
                req_to_install.check_if_exists()
            elif req_to_install.satisfied_by:
                if resolver.require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(requirement_set.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    resolver.finder,
                    resolver._is_upgrade_allowed(req_to_install),
                    resolver.require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if resolver.require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not resolver.require_hashes)
                if resolver.require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = requirement_set.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and requirement_set.wheel_download_dir:
                        # when doing 'pip wheel' we download wheels to a
                        # dedicated dir.
                        download_dir = requirement_set.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=resolver.session, hashes=hashes,
                        progress_bar=requirement_set.progress_bar)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if requirement_set.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(requirement_set.download_dir)
                # req_to_install.req is only available after unpack for URL
                # packages; repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not resolver.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    should_modify = (
                        resolver.upgrade_strategy != "to-satisfy-only" or
                        resolver.ignore_installed
                    )
                    if should_modify:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (resolver.use_user_site and not
                                dist_in_usersite(req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )
        return abstract_dist
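
# Illustrative sketch only, not pip's helpers: the hash-checking rules applied
# above, reduced to one standalone function. The DemoLink namedtuple and its
# is_vcs / is_local_dir / is_pinned fields are assumptions for the demo, and a
# missing hash raises immediately here instead of going through the
# MissingHashes facade used above.
import collections

DemoLink = collections.namedtuple(
    'DemoLink', 'is_vcs is_local_dir is_pinned hashes')


class DemoHashError(Exception):
    pass


def check_hash_policy(link, require_hashes):
    """Raise DemoHashError if the link cannot satisfy hash-checking mode."""
    if not require_hashes:
        return link.hashes  # hashes found on the index are trusted
    if link.is_vcs:
        raise DemoHashError('VCS URLs cannot be hash-checked')
    if link.is_local_dir:
        raise DemoHashError('directory URLs cannot be hash-checked')
    if not link.is_pinned:
        raise DemoHashError('requirement must be pinned (==) to require hashes')
    if not link.hashes:
        raise DemoHashError('no known-good hashes were supplied')
    return link.hashes
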
Beispiel #41
0
    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s (%s)', skip_reason,
                    req_to_install,
                    req_to_install.satisfied_by.version)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
                req_to_install.check_if_exists()
                in_toto_verify_wrapper(
                    req_to_install.source_dir,
                    toto_verify=self.toto_verify,
                    toto_default=self.toto_default)
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder,
                    self._is_upgrade_allowed(req_to_install),
                    require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel' we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    #print "req.source_dir: %s, req_to_install.link: %s, download_dir: %s" % (req_to_install.source_dir, req_to_install.link, download_dir)
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes, toto_verify=self.toto_verify, toto_default=self.toto_default)


                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only available after unpack for URL
                # packages; repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            try:
                check_dist_requires_python(dist)
            except UnsupportedPythonVersion as e:
                if self.ignore_requires_python:
                    logger.warning(e.args[0])
                else:
                    req_to_install.remove_temporary_source()
                    raise
            more_reqs = []

            def add_req(subreq, extras_requested):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name,
                    extras_requested=extras_requested))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)
        return more_reqs
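
# Standalone sketch (assumed names, not pip internals) of the download-dir and
# auto-delete decision made in the try-block above: wheels headed for a
# dedicated wheel dir are only unpacked for their metadata, while a wheel that
# is about to be installed keeps its unpacked tree.
def choose_unpack_policy(is_wheel, download_dir, wheel_download_dir):
    """Return (effective_download_dir, autodelete_unpacked)."""
    autodelete_unpacked = True  # unpacked sdists are always cleaned up
    if is_wheel and wheel_download_dir:
        download_dir = wheel_download_dir  # 'pip wheel' collects wheels here
    if is_wheel and not download_dir:
        autodelete_unpacked = False  # install straight from the unpacked wheel
    return download_dir, autodelete_unpacked
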
Beispiel #42
0
            'Bad directory: %s' % location
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)
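
# Minimal standalone illustration of the parsing done in get_info() above. The
# real _svn_url_re / _svn_revision_re patterns are not shown in this snippet,
# so the regexes below are assumptions based on typical `svn info` output
# ("URL: ..." and "Revision: ..." lines), not pip's own definitions.
import re

_demo_svn_url_re = re.compile(r'^URL: (.+)$', re.MULTILINE)
_demo_svn_revision_re = re.compile(r'^Revision: (\d+)$', re.MULTILINE)


def parse_svn_info(output):
    """Return (url, revision) parsed from `svn info` output, or Nones."""
    url_match = _demo_svn_url_re.search(output)
    if not url_match:
        return None, None
    rev_match = _demo_svn_revision_re.search(output)
    revision = rev_match.group(1) if rev_match else None
    return url_match.group(1).strip(), revision
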

    def export(self, location):
Beispiel #43
0

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        url = self.run_command(
            ['showconfig', 'paths.default'],
Beispiel #44
0
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.get_revision(dest).startswith(rev_options[0]):
                    self.run_command(
                        ['checkout', '-q'] + rev_options,
Beispiel #45
0
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None, session=None):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir)
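
# Hedged sketch of the "already downloaded?" check referenced above. The real
# _check_download_dir works on a link object and may do further validation;
# this assumed helper only tests for the file's presence.
import os.path


def find_previously_downloaded(filename, download_dir):
    """Return the cached path if `filename` already sits in download_dir."""
    if not download_dir:
        return None
    candidate = os.path.join(download_dir, filename)
    return candidate if os.path.exists(candidate) else None
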
Beispiel #46
0
    def _prepare_file(self, finder, req_to_install):
        """Prepare a single requirements files.

        :return: A list of addition InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement already %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
            elif req_to_install.satisfied_by:
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(finder, self.upgrade)
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel' we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only available after unpack for URL
                # packages; repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            more_reqs = []

            def add_req(subreq):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not self.ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs
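
# Standalone sketch (assumed helper, not pip code) of the conflict decision
# made above once an installed copy is found after unpacking: only schedule an
# uninstall when we are allowed to modify the environment, and never uninstall
# a global-site copy from a --user install (it is merely shadowed).
def resolve_conflict(existing_dist, will_modify, user_install, in_usersite):
    """Return (dist_to_uninstall, proceed_with_install)."""
    if not will_modify:
        return None, False  # "Requirement already satisfied"
    if user_install and not in_usersite:
        return None, True   # install into the user site without uninstalling
    return existing_dist, True  # uninstall-on-upgrade
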
Beispiel #47
0
    def _prepare_linked_requirement(self, req, resolver):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again." %
                    (req, req.source_dir))
            req.populate_link(resolver.finder,
                              resolver._is_upgrade_allowed(req),
                              resolver.require_hashes)
            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if resolver.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()
            hashes = req.hashes(trust_internet=not resolver.require_hashes)
            if resolver.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(req.link,
                           req.source_dir,
                           download_dir,
                           autodelete_unpacked,
                           session=resolver.session,
                           hashes=hashes,
                           progress_bar=self.progress_bar)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' % (req, exc, req.link))
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist()
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
            # req.req is only available after unpack for URL packages;
            # repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not resolver.ignore_installed:
                req.check_if_exists()
            if req.satisfied_by:
                should_modify = (resolver.upgrade_strategy != "to-satisfy-only"
                                 or resolver.ignore_installed)
                if should_modify:
                    resolver._set_req_to_reinstall(req)
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req,
                    )
        return abstract_dist
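
# Tiny illustration (not a pip API) of the should_modify test above: under the
# "to-satisfy-only" strategy an already-satisfied requirement is left alone
# unless installed copies are being ignored altogether.
def should_reinstall(upgrade_strategy, ignore_installed):
    return upgrade_strategy != "to-satisfy-only" or ignore_installed

# e.g. should_reinstall("to-satisfy-only", False) is False, while any other
# strategy, or ignoring installed copies, makes it True.
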
Beispiel #48
0
    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.check_version(dest, rev_options):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info('Skipping because already up-to-date.')
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(dest),
                    dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                sys.exit(-1)
        return checkout
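
# Standalone sketch (assumed names, no pip imports) of the decision table that
# check_destination() walks through: a fresh destination is cloned, an
# existing checkout is reused or updated, and anything else prompts the user
# to (s)witch, (i)gnore, (w)ipe or (b)ackup.
def classify_destination(exists, is_repo, same_url, up_to_date):
    """Return 'clone', 'skip', 'update', 'prompt-switch' or 'prompt-wipe'."""
    if not exists:
        return 'clone'           # fresh checkout/clone needed
    if not is_repo:
        return 'prompt-wipe'     # directory exists but is not this VCS type
    if not same_url:
        return 'prompt-switch'   # repo points at a different URL
    return 'skip' if up_to_date else 'update'
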
Beispiel #49
0
        zip_filename = os.path.dirname(filename)
        if (not os.path.isfile(zip_filename) and
                zipfile.is_zipfile(zip_filename)):
            raise InstallationError(
                'Module %s (in %s) isn\'t located in a zip file in %s'
                % (module_name, filename, zip_filename))
        package_path = os.path.dirname(zip_filename)
        if package_path not in self.paths():
            logger.warning(
                'Unpacking %s into %s, but %s is not on sys.path',
                display_path(zip_filename),
                display_path(package_path),
                display_path(package_path),
            )
        logger.info(
            'Unzipping %s (in %s)', module_name, display_path(zip_filename),
        )
        if self.simulate:
            logger.info(
                'Skipping remaining operations because of --simulate'
            )
            return

        with indent_log():
            # FIXME: this should be undoable:
            zip = zipfile.ZipFile(zip_filename)
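
# Quick standalone illustration of the two sanity checks above: the module's
# parent must itself be a zip archive, and that archive's directory should be
# on sys.path. Names here are illustrative, not pip helpers.
import os
import sys
import zipfile


def zip_location_status(module_file):
    """Return (inside_zip, zip_dir_on_sys_path) for a module's file path."""
    zip_filename = os.path.dirname(module_file)
    package_path = os.path.dirname(zip_filename)
    inside_zip = os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename)
    return inside_zip, package_path in sys.path
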