Example #1
def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
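
A minimal standalone sketch of the same overwrite-prompt-and-copy pattern, using only the standard library. The helper name copy_with_prompt and its inline input() prompt are assumptions for illustration, not the ins helpers used above (ask_path_exists, display_path, backup_dir).

import os
import shutil


def copy_with_prompt(src, dest_dir):
    # Hypothetical helper: copy `src` into `dest_dir`, asking what to do when
    # the target already exists, roughly mirroring _copy_file above.
    dest = os.path.join(dest_dir, os.path.basename(src))
    if os.path.exists(dest):
        answer = ''
        while answer not in ('i', 'w', 'b'):
            answer = input(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup: ' % dest
            ).strip().lower()
        if answer == 'i':
            return dest                       # keep the existing file
        if answer == 'w':
            os.remove(dest)                   # wipe, then copy fresh
        elif answer == 'b':
            shutil.move(dest, dest + '.bak')  # simple fixed backup name
    shutil.copy(src, dest)
    return dest
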
Example #2
 def archive(self, build_dir):
     # type: (str) -> None
     assert self.source_dir
     create_archive = True
     archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
     archive_path = os.path.join(build_dir, archive_name)
     if os.path.exists(archive_path):
         response = ask_path_exists(
             'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
             display_path(archive_path), ('i', 'w', 'b', 'a'))
         if response == 'i':
             create_archive = False
         elif response == 'w':
             logger.warning('Deleting %s', display_path(archive_path))
             os.remove(archive_path)
         elif response == 'b':
             dest_file = backup_dir(archive_path)
             logger.warning(
                 'Backing up %s to %s',
                 display_path(archive_path),
                 display_path(dest_file),
             )
             shutil.move(archive_path, dest_file)
         elif response == 'a':
             sys.exit(-1)
     if create_archive:
         zip = zipfile.ZipFile(
             archive_path, 'w', zipfile.ZIP_DEFLATED,
             allowZip64=True
         )
         dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
         for dirpath, dirnames, filenames in os.walk(dir):
             if 'ins-egg-info' in dirnames:
                 dirnames.remove('ins-egg-info')
             for dirname in dirnames:
                 dir_arcname = self._get_archive_name(dirname,
                                                      parentdir=dirpath,
                                                      rootdir=dir)
                 zipdir = zipfile.ZipInfo(dir_arcname + '/')
                 zipdir.external_attr = 0x1ED << 16  # 0o755
                 zip.writestr(zipdir, '')
             for filename in filenames:
                 if filename == ins_DELETE_MARKER_FILENAME:
                     continue
                 file_arcname = self._get_archive_name(filename,
                                                       parentdir=dirpath,
                                                       rootdir=dir)
                 filename = os.path.join(dirpath, filename)
                 zip.write(filename, file_arcname)
         zip.close()
         logger.info('Saved %s', display_path(archive_path))
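
The archiving itself comes down to walking a tree and writing it into a deflated zip with an explicit entry per directory carrying 0o755 permissions. Below is a hedged standalone sketch of just that part, leaving out the ins-specific filtering of 'ins-egg-info' directories and delete-marker files; zip_tree is a made-up name.

import os
import zipfile


def zip_tree(root, archive_path):
    # Write every file under `root` into a deflated zip archive, adding an
    # explicit entry per directory whose external_attr encodes mode 0o755.
    root = os.path.normcase(os.path.abspath(root))
    with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED,
                         allowZip64=True) as zf:
        for dirpath, dirnames, filenames in os.walk(root):
            for dirname in dirnames:
                arcname = os.path.relpath(os.path.join(dirpath, dirname), root)
                info = zipfile.ZipInfo(arcname + '/')
                info.external_attr = 0o755 << 16  # upper 16 bits hold the Unix mode
                zf.writestr(info, '')
            for filename in filenames:
                full = os.path.join(dirpath, filename)
                zf.write(full, os.path.relpath(full, root))
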
Example #3
    def fetch_new(cls, dest, url, rev_options):
        rev_display = rev_options.to_display()
        logger.info(
            'Cloning %s%s to %s', redact_password_from_url(url),
            rev_display, display_path(dest),
        )
        cls.run_command(['clone', '-q', url, dest])

        if rev_options.rev:
            # Then a specific revision was requested.
            rev_options = cls.resolve_revision(dest, url, rev_options)
            branch_name = getattr(rev_options, 'branch_name', None)
            if branch_name is None:
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not cls.is_commit_id_equal(dest, rev_options.rev):
                    cmd_args = ['checkout', '-q'] + rev_options.to_args()
                    cls.run_command(cmd_args, cwd=dest)
            elif cls.get_current_branch(dest) != branch_name:
                # Then a specific branch was requested, and that branch
                # is not yet checked out.
                track_branch = 'origin/{}'.format(branch_name)
                cmd_args = [
                    'checkout', '-b', branch_name, '--track', track_branch,
                ]
                cls.run_command(cmd_args, cwd=dest)

        #: repo may contain submodules
        cls.update_submodules(dest)
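
Outside the VersionControl wrapper, the command sequence driven above corresponds roughly to the following subprocess calls. This is a sketch assuming git is on PATH; clone_at_revision is a hypothetical name, and it omits the branch-tracking checkout and all error handling.

import subprocess


def clone_at_revision(url, dest, rev=None):
    # Quiet clone, optional quiet checkout of a specific revision, then
    # initialise any submodules, mirroring the spirit of fetch_new above.
    subprocess.check_call(['git', 'clone', '-q', url, dest])
    if rev is not None:
        subprocess.check_call(['git', 'checkout', '-q', rev], cwd=dest)
    subprocess.check_call(
        ['git', 'submodule', 'update', '--init', '--recursive', '-q'],
        cwd=dest,
    )
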
Example #4
 def fetch_new(cls, dest, url, rev_options):
     rev_display = rev_options.to_display()
     logger.info(
         'Checking out %s%s to %s',
         url,
         rev_display,
         display_path(dest),
     )
     cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
     cls.run_command(cmd_args)
Example #5
    def _correct_build_location(self):
        # type: () -> None
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.run_egg_info to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir.path
        assert (self._ideal_build_dir is not None and
                self._ideal_build_dir.path)  # type: ignore
        old_location = self._temp_build_dir.path
        self._temp_build_dir.path = None

        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir.path = new_location
        self._ideal_build_dir = None
        self.source_dir = os.path.normpath(os.path.abspath(new_location))
        self._egg_info_path = None

        # Correct the metadata directory, if it exists
        if self.metadata_directory:
            old_meta = self.metadata_directory
            rel = os.path.relpath(old_meta, start=old_location)
            new_meta = os.path.join(new_location, rel)
            new_meta = os.path.normpath(os.path.abspath(new_meta))
            self.metadata_directory = new_meta
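
Stripped of the requirement bookkeeping, the core operation is: refuse to overwrite an existing directory, otherwise move the temporary build tree into place and normalise the resulting path. A minimal sketch under that reading; relocate_build_dir is a hypothetical name and a plain RuntimeError stands in for InstallationError.

import os
import shutil


def relocate_build_dir(temp_dir, final_dir):
    # Move a temporary build directory to its final location, refusing to
    # clobber anything that already lives there, as in the check above.
    if os.path.exists(final_dir):
        raise RuntimeError(
            'A package already exists in %s; please remove it to continue'
            % final_dir)
    shutil.move(temp_dir, final_dir)
    return os.path.normpath(os.path.abspath(final_dir))
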
Example #6
 def fetch_new(cls, dest, url, rev_options):
     rev_display = rev_options.to_display()
     logger.info(
         'Cloning hg %s%s to %s',
         url,
         rev_display,
         display_path(dest),
     )
     cls.run_command(['clone', '--noupdate', '-q', url, dest])
     cmd_args = ['update', '-q'] + rev_options.to_args()
     cls.run_command(cmd_args, cwd=dest)
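
Run directly, the equivalent Mercurial commands would look roughly like this; hg must be on PATH, and the URL, destination and revision here are placeholders.

import subprocess

subprocess.check_call(
    ['hg', 'clone', '--noupdate', '-q',
     'https://example.org/hg/project', 'project'])
subprocess.check_call(['hg', 'update', '-q', 'default'], cwd='project')
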
Example #7
 def _download_should_save(self):
     # type: () -> bool
     # TODO: Modify to reduce indentation needed
     if self.download_dir:
         self.download_dir = expanduser(self.download_dir)
         if os.path.exists(self.download_dir):
             return True
         else:
             logger.critical('Could not find download directory')
             raise InstallationError(
                 "Could not find or access download directory '%s'" %
                 display_path(self.download_dir))
     return False
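
The same check in isolation: expand a leading '~', then insist that the directory actually exists before agreeing to save downloads there. A small sketch with a made-up name and a plain RuntimeError in place of InstallationError.

import os


def download_dir_is_usable(download_dir):
    # Return False when no directory was configured, True when it exists,
    # and raise when it was configured but cannot be found.
    if not download_dir:
        return False
    download_dir = os.path.expanduser(download_dir)
    if not os.path.exists(download_dir):
        raise RuntimeError(
            "Could not find or access download directory '%s'" % download_dir)
    return True
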
Example #8
def get_metadata(dist):
    # type: (Distribution) -> Message
    if (isinstance(dist, pkg_resources.DistInfoDistribution)
            and dist.has_metadata('METADATA')):
        metadata = dist.get_metadata('METADATA')
    elif dist.has_metadata('PKG-INFO'):
        metadata = dist.get_metadata('PKG-INFO')
    else:
        logger.warning("No metadata found in %s", display_path(dist.location))
        metadata = ''

    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    return feed_parser.close()
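
The parsing step relies on the standard library's FeedParser to turn raw METADATA / PKG-INFO text into an email.message.Message. A self-contained illustration with made-up metadata text:

from email.parser import FeedParser

raw = 'Metadata-Version: 2.1\nName: example\nVersion: 1.0\n'
parser = FeedParser()
parser.feed(raw)
message = parser.close()
print(message['Name'], message['Version'])  # -> example 1.0
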
Example #9
 def assert_source_matches_version(self):
     # type: () -> None
     assert self.source_dir
     version = self.metadata['version']
     if self.req.specifier and version not in self.req.specifier:
         logger.warning(
             'Requested %s, but installing version %s',
             self,
             version,
         )
     else:
         logger.debug(
             'Source in %s has version %s, which satisfies requirement %s',
             display_path(self.source_dir),
             version,
             self,
         )
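
The membership test `version not in self.req.specifier` is effectively the SpecifierSet containment check from the packaging library. A tiny standalone illustration; the specifier and version here are placeholders.

from packaging.specifiers import SpecifierSet

specifier = SpecifierSet('>=1.0,<2.0')
version = '2.1'
if version not in specifier:
    print('Requested %s, but installing version %s' % (specifier, version))
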
Example #10
 def __str__(self):
     # type: () -> str
     if self.req:
         s = str(self.req)
         if self.link:
             s += ' from %s' % redact_password_from_url(self.link.url)
     elif self.link:
         s = redact_password_from_url(self.link.url)
     else:
         s = '<InstallRequirement>'
     if self.satisfied_by is not None:
         s += ' in %s' % display_path(self.satisfied_by.location)
     if self.comes_from:
         if isinstance(self.comes_from, six.string_types):
             comes_from = self.comes_from
         else:
             comes_from = self.comes_from.from_path()
         if comes_from:
             s += ' (from %s)' % comes_from
     return s
Example #11
    def prepare_linked_requirement(
            self,
            req,  # type: InstallRequirement
            session,  # type: insSession
            finder,  # type: PackageFinder
            upgrade_allowed,  # type: bool
            require_hashes  # type: bool
    ):
        # type: (...) -> DistAbstraction
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "ins can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". ins is being responsible and not assuming it "
                    "can delete this. Please delete it and try again." %
                    (req, req.source_dir))
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after ins ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'ins wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(req.link,
                           req.source_dir,
                           download_dir,
                           autodelete_unpacked,
                           session=session,
                           hashes=hashes,
                           progress_bar=self.progress_bar)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' % (req, exc, req.link))
            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if not req.link.is_artifact:
                    req.archive(self.download_dir)
        return abstract_dist
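
When require_hashes is in force, the essential operation hidden behind the hashes machinery is digesting the downloaded file and comparing it against a pinned value. The sketch below is illustrative only and is not the Hashes/MissingHashes classes referenced above.

import hashlib


def sha256_matches(path, expected_hex):
    # Hash the file in chunks and compare against the pinned hex digest.
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected_hex
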
Example #12
    def obtain(self, dest):
        # type: (str) -> None
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        Args:
          dest: the repository directory in which to install or update.
        """
        url, rev_options = self.get_url_rev_options(self.url)

        if not os.path.exists(dest):
            self.fetch_new(dest, url, rev_options)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url):
                logger.debug(
                    '%s in %s exists, and has correct URL (%s)',
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info('Skipping because already up-to-date.')
                return

            logger.warning(
                '%s %s in %s exists with URL %s',
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w',
                                                                'b'))
        else:
            logger.warning(
                'Directory %s already exists, and is not a %s %s.',
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = (
                '(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
                ('i', 'w', 'b'))

        logger.warning(
            'The plan is to install the %s repository %s',
            self.name,
            url,
        )
        response = ask_path_exists('What to do?  %s' % prompt[0], prompt[1])

        if response == 'a':
            sys.exit(-1)

        if response == 'w':
            logger.warning('Deleting %s', display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options)
            return

        if response == 'b':
            dest_dir = backup_dir(dest)
            logger.warning(
                'Backing up %s to %s',
                display_path(dest),
                dest_dir,
            )
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options)
            return

        # Do nothing if the response is "i".
        if response == 's':
            logger.info(
                'Switching %s %s to %s%s',
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)
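
The destructive branches of the prompt reduce to removing or renaming the existing checkout before fetching fresh. A simplified sketch; the fixed '.bak' suffix stands in for backup_dir, which picks a unique name.

import shutil


def wipe_or_backup(dest, response):
    # 'w' removes the existing directory; 'b' moves it aside so a fresh
    # checkout can take its place, as in the obtain() flow above.
    if response == 'w':
        shutil.rmtree(dest)
    elif response == 'b':
        shutil.move(dest, dest + '.bak')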