def clobber(source, dest, is_base, fixer=None, filter=None):
    ensure_dir(dest)  # common for the 'include' path

    for dir, subdirs, files in os.walk(source):
        basedir = dir[len(source):].lstrip(os.path.sep)
        destdir = os.path.join(dest, basedir)
        if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
            continue
        for s in subdirs:
            destsubdir = os.path.join(dest, basedir, s)
            if is_base and basedir == '' and destsubdir.endswith('.data'):
                data_dirs.append(s)
                continue
            elif (is_base and
                    s.endswith('.dist-info') and
                    canonicalize_name(s).startswith(
                        canonicalize_name(req.name))):
                assert not info_dir, ('Multiple .dist-info directories: ' +
                                      destsubdir + ', ' +
                                      ', '.join(info_dir))
                info_dir.append(destsubdir)
        for f in files:
            # Skip unwanted files
            if filter and filter(f):
                continue
            srcfile = os.path.join(dir, f)
            destfile = os.path.join(dest, basedir, f)
            # directory creation is lazy and after the file filtering above
            # to ensure we don't install empty dirs; empty dirs can't be
            # uninstalled.
            ensure_dir(destdir)

            # We use copyfile (not move, copy, or copy2) to be extra sure
            # that we are not moving directories over (copyfile fails for
            # directories) as well as to ensure that we are not copying
            # over any metadata because we want more control over what
            # metadata we actually copy over.
            shutil.copyfile(srcfile, destfile)

            # Copy over the metadata for the file, currently this only
            # includes the atime and mtime.
            st = os.stat(srcfile)
            if hasattr(os, "utime"):
                os.utime(destfile, (st.st_atime, st.st_mtime))

            # If our file is executable, then make our destination file
            # executable.
            if os.access(srcfile, os.X_OK):
                st = os.stat(srcfile)
                permissions = (
                    st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                )
                os.chmod(destfile, permissions)

            changed = False
            if fixer:
                changed = fixer(destfile)
            record_installed(srcfile, destfile, changed)

def run_egg_info(self):
    assert self.source_dir
    if self.name:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package %s',
            self.setup_py, self.name,
        )
    else:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package from %s',
            self.setup_py, self.link,
        )

    with indent_log():
        script = SETUPTOOLS_SHIM % self.setup_py
        base_cmd = [os.environ['PIP_PYTHON_PATH'], '-c', script]
        if self.isolated:
            base_cmd += ["--no-user-cfg"]
        egg_info_cmd = base_cmd + ['egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
            ensure_dir(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        call_subprocess(
            egg_info_cmd + egg_base_option,
            cwd=self.setup_py_dir,
            show_stdout=False,
            command_desc='python setup.py egg_info')

    if not self.req:
        if isinstance(parse_version(self.pkg_info()["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([
                self.pkg_info()["Name"],
                op,
                self.pkg_info()["Version"],
            ])
        )
        self._correct_build_location()
    else:
        metadata_name = canonicalize_name(self.pkg_info()["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Running setup.py (path:%s) egg_info for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.setup_py, self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)

def mkurl_pypi_url(url):
    loc = posixpath.join(
        url,
        urllib_parse.quote(canonicalize_name(project_name)))
    # For maximum compatibility with easy_install, ensure the path
    # ends in a trailing slash.  Although this isn't in the spec
    # (and PyPI can handle it without the slash) some other index
    # implementations might break if they relied on easy_install's
    # behavior.
    if not loc.endswith('/'):
        loc = loc + '/'
    return loc

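# --- Illustrative sketch (not part of pip): how mkurl_pypi_url shapes a
# simple-index URL. The base URL and helper name below are assumptions made
# for demonstration only; at runtime pip derives the base from --index-url
# and closes over project_name.
import posixpath
from urllib.parse import quote

from packaging.utils import canonicalize_name


def _demo_simple_index_url(base_url, project_name):
    # Join the canonicalized, URL-quoted project name onto the index root
    # and force a trailing slash, mirroring the behaviour above.
    loc = posixpath.join(base_url, quote(canonicalize_name(project_name)))
    return loc if loc.endswith('/') else loc + '/'


# _demo_simple_index_url('https://pypi.org/simple', 'My_Project')
# -> 'https://pypi.org/simple/my-project/'
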
def fmt_ctl_handle_mutual_exclude(value, target, other):
    new = value.split(',')
    while ':all:' in new:
        other.clear()
        target.clear()
        target.add(':all:')
        del new[:new.index(':all:') + 1]
        if ':none:' not in new:
            # Without a none, we want to discard everything as :all: covers it
            return
    for name in new:
        if name == ':none:':
            target.clear()
            continue
        name = canonicalize_name(name)
        other.discard(name)
        target.add(name)

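# --- Hypothetical usage sketch for fmt_ctl_handle_mutual_exclude: the plain
# sets below stand in for the no_binary / only_binary fields of pip's
# FormatControl named tuple; the values mirror what --no-binary /
# --only-binary pass through.
no_binary, only_binary = set(), set()

# --no-binary :all: marks everything source-only and clears the opposite set.
fmt_ctl_handle_mutual_exclude(':all:', no_binary, only_binary)
assert no_binary == {':all:'} and only_binary == set()

# --only-binary requests adds 'requests' to only_binary and discards it from
# no_binary if it was listed there.
fmt_ctl_handle_mutual_exclude('requests', only_binary, no_binary)
assert 'requests' in only_binary and 'requests' not in no_binary
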
def cached_wheel(cache_dir, link, format_control, package_name):
    if not cache_dir:
        return link
    if not link:
        return link
    if link.is_wheel:
        return link
    if not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = canonicalize_name(package_name)
    formats = pip9.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        wheel_names = os.listdir(root)
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for wheel_name in wheel_names:
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), wheel_name))
    if not candidates:
        return link
    candidates.sort()
    path = os.path.join(root, candidates[0][1])
    return pip9.index.Link(path_to_url(path))

def _link_package_versions(self, link, search, ignore_compatibility=True):
    """Return an InstallationCandidate or None"""
    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            self._log_skipped_link(link, 'not a file')
            return
        # Always ignore unsupported extensions even when we ignore compatibility
        if ext not in SUPPORTED_EXTENSIONS:
            self._log_skipped_link(
                link, 'unsupported archive format: %s' % ext)
            return
        if ("binary" not in search.formats and ext == wheel_ext and
                not ignore_compatibility):
            self._log_skipped_link(
                link, 'No binaries permitted for %s' % search.supplied)
            return
        if ("macosx10" in link.path and ext == '.zip' and
                not ignore_compatibility):
            self._log_skipped_link(link, 'macosx10 one')
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                self._log_skipped_link(link, 'invalid wheel filename')
                return
            if canonicalize_name(wheel.name) != search.canonical:
                self._log_skipped_link(
                    link, 'wrong project name (not %s)' % search.supplied)
                return

            if not wheel.supported(self.valid_tags) and not ignore_compatibility:
                self._log_skipped_link(
                    link, 'it is not compatible with this Python')
                return

            version = wheel.version

    # This should be up by the search.ok_binary check, but see issue 2700.
    if "source" not in search.formats and ext != wheel_ext:
        self._log_skipped_link(
            link, 'No sources permitted for %s' % search.supplied)
        return

    if not version:
        version = egg_info_matches(egg_info, search.supplied, link)
    if version is None:
        self._log_skipped_link(
            link, 'wrong project name (not %s)' % search.supplied)
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            self._log_skipped_link(
                link, 'Python version is incorrect')
            return
    try:
        support_this_python = check_requires_python(link.requires_python)
    except specifiers.InvalidSpecifier:
        logger.debug("Package %s has an invalid Requires-Python entry: %s",
                     link.filename, link.requires_python)
        support_this_python = True

    if not support_this_python and not ignore_compatibility:
        logger.debug("The package %s is incompatible with the python "
                     "version in use. Acceptable python versions are: %s",
                     link, link.requires_python)
        return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search.supplied, version, link,
                                 link.requires_python)

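# --- Standalone sketch of the Requires-Python check used above, written
# directly against packaging.specifiers. This is an illustration of the idea
# behind check_requires_python, not pip's own implementation; the helper name
# is an assumption.
import sys

from packaging import specifiers


def _demo_supports_this_python(requires_python):
    # A missing Requires-Python value places no constraint at all.
    if requires_python is None:
        return True
    # May raise specifiers.InvalidSpecifier, which the caller above treats
    # as "assume supported".
    spec = specifiers.SpecifierSet(requires_python)
    running = '.'.join(str(part) for part in sys.version_info[:3])
    return running in spec


# e.g. _demo_supports_this_python('>=3.6') -> True on a Python 3.6+ interpreter
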
def find_all_candidates(self, project_name):
    """Find all available InstallationCandidate for project_name

    This checks index_urls, find_links and dependency_links.
    All versions found are returned as an InstallationCandidate list.

    See _link_package_versions for details on which files are accepted
    """
    index_locations = self._get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = self._sort_locations(index_locations)
    fl_file_loc, fl_url_loc = self._sort_locations(
        self.find_links, expand_dir=True)
    dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

    file_locations = (
        Link(url) for url in itertools.chain(
            index_file_loc, fl_file_loc, dep_file_loc)
    )

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links
    # We explicitly do not trust links that came from dependency_links
    # We want to filter out any thing which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
            (Link(url) for url in dep_url_loc),
        )
        if self._validate_secure_origin(logger, link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    canonical_name = canonicalize_name(project_name)
    formats = fmt_ctl_formats(self.format_control, canonical_name)
    search = Search(project_name, canonical_name, formats)
    find_links_versions = self._package_versions(
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
        search
    )

    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(page.links, search)
            )

    dependency_versions = self._package_versions(
        (Link(url) for url in self.dependency_links), search
    )
    if dependency_versions:
        logger.debug(
            'dependency_links found: %s',
            ', '.join([
                version.location.url for version in dependency_versions
            ])
        )

    file_versions = self._package_versions(file_locations, search)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.location.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return (
        file_versions + find_links_versions + page_versions +
        dependency_versions
    )

def build(self, autobuilding=False):
    """Build wheels.

    :param unpack: If True, replace the sdist we built from with the
        newly built wheel, in preparation for installation.
    :return: True if all the wheels built correctly.
    """
    assert self._wheel_dir or (autobuilding and self._cache_root)
    # unpack sdists and constructs req set
    self.requirement_set.prepare_files(self.finder)

    reqset = self.requirement_set.requirements.values()

    buildset = []
    for req in reqset:
        if req.constraint:
            continue
        if req.is_wheel:
            if not autobuilding:
                logger.info(
                    'Skipping %s, due to already being wheel.', req.name)
        elif autobuilding and req.editable:
            pass
        elif autobuilding and req.link and not req.link.is_artifact:
            pass
        elif autobuilding and not req.source_dir:
            pass
        else:
            if autobuilding:
                link = req.link
                base, ext = link.splitext()
                if pip9.index.egg_info_matches(base, None, link) is None:
                    # Doesn't look like a package - don't autobuild a wheel
                    # because we'll have no way to lookup the result sanely
                    continue
                if "binary" not in pip9.index.fmt_ctl_formats(
                        self.finder.format_control,
                        canonicalize_name(req.name)):
                    logger.info(
                        "Skipping bdist_wheel for %s, due to binaries "
                        "being disabled for it.", req.name)
                    continue
            buildset.append(req)

    if not buildset:
        return True

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join([req.name for req in buildset]),
    )
    with indent_log():
        build_success, build_failure = [], []
        for req in buildset:
            python_tag = None
            if autobuilding:
                python_tag = pep425tags.implementation_tag
                output_dir = _cache_for_link(self._cache_root, req.link)
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning("Building wheel for %s failed: %s",
                                   req.name, e)
                    build_failure.append(req)
                    continue
            else:
                output_dir = self._wheel_dir
            wheel_file = self._build_one(
                req, output_dir,
                python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if autobuilding:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if req.source_dir and not os.path.exists(
                            os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.build_location(
                        self.requirement_set.build_dir)
                    # Update the link for this.
                    req.link = pip9.index.Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_url(
                        req.link, req.source_dir, None, False,
                        session=self.requirement_set.session)
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failure]),
        )
    # Return True if all builds were successful
    return len(build_failure) == 0

def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip9.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]

        yield (
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()

def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])

        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package

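# --- Standalone sketch: parsing package metadata headers the way
# search_packages_info does, using email.parser.FeedParser from the standard
# library. The metadata string below is a fabricated example for illustration.
from email.parser import FeedParser

_example_metadata = (
    "Metadata-Version: 2.1\n"
    "Name: example\n"
    "Version: 1.0\n"
    "Summary: Demonstration package\n"
)
_parser = FeedParser()
_parser.feed(_example_metadata)
_headers = _parser.close()
# Header lookup is case-insensitive, which is why the lower-case keys used
# above work: _headers.get('summary') -> 'Demonstration package'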