def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
    # type: (ZipFile, str, str) -> Distribution
    """Get a pkg_resources distribution given a wheel.

    :raises UnsupportedWheel: on any errors
    """
    info_dir, _ = parse_wheel(wheel_zip, name)

    metadata_files = [
        p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir))
    ]

    metadata_text = {}  # type: Dict[str, bytes]
    for path in metadata_files:
        # If a flag is set, namelist entries may be unicode in Python 2.
        # We coerce them to native str type to match the types used in the rest
        # of the code. This cannot fail because unicode can always be encoded
        # with UTF-8.
        full_path = ensure_str(path)
        _, metadata_name = full_path.split("/", 1)

        try:
            metadata_text[metadata_name] = read_wheel_metadata_file(
                wheel_zip, full_path
            )
        except UnsupportedWheel as e:
            raise UnsupportedWheel(
                "{} has an invalid wheel, {}".format(name, str(e))
            )

    metadata = WheelMetadata(metadata_text, location)

    return DistInfoDistribution(
        location=location, metadata=metadata, project_name=name
    )
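# Illustrative usage of pkg_resources_distribution_for_wheel() (not part of
# pip itself; the wheel filename and location below are made-up examples):
#
#     with ZipFile("example-1.0-py3-none-any.whl") as wheel_zip:
#         dist = pkg_resources_distribution_for_wheel(
#             wheel_zip, "example", "/tmp/example-location"
#         )
#         # dist.project_name == "example"; metadata files such as METADATA
#         # and RECORD are served from the in-memory WheelMetadata provider.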
def wheel_dist_info_dir(source, name):
    # type: (ZipFile, str) -> str
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip file path separators must be /
    subdirs = list(set(p.split("/")[0] for p in source.namelist()))

    info_dirs = [s for s in subdirs if s.endswith('.dist-info')]

    if not info_dirs:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(info_dirs) > 1:
        raise UnsupportedWheel(
            "multiple .dist-info directories found: {}".format(
                ", ".join(info_dirs)
            )
        )

    info_dir = info_dirs[0]

    info_dir_name = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not info_dir_name.startswith(canonical_name):
        raise UnsupportedWheel(
            ".dist-info directory {!r} does not start with {!r}".format(
                info_dir, canonical_name
            )
        )

    # Zip file paths can be unicode or str depending on the zip entry flags,
    # so normalize it.
    return ensure_str(info_dir)
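# Illustrative example for wheel_dist_info_dir() (hypothetical wheel
# contents): for a ZipFile whose namelist() contains "example/__init__.py"
# and "example-1.0.dist-info/METADATA", wheel_dist_info_dir(zf, "example")
# returns "example-1.0.dist-info"; a wheel with no *.dist-info directory,
# or with more than one, raises UnsupportedWheel.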
def get_metadata(self, name):
    # type: (str) -> str
    try:
        return ensure_str(self._metadata[name])
    except UnicodeDecodeError as e:
        # Mirrors handling done in pkg_resources.NullProvider.
        e.reason += " in {} file".format(name)
        raise
def wheel_metadata(source, dist_info_dir):
    # type: (ZipFile, str) -> Message
    """Return the WHEEL metadata of an extracted wheel, if possible.
    Otherwise, raise UnsupportedWheel.
    """
    path = "{}/WHEEL".format(dist_info_dir)
    # Zip file path separators must be /
    wheel_contents = read_wheel_metadata_file(source, path)

    try:
        wheel_text = ensure_str(wheel_contents)
    except UnicodeDecodeError as e:
        raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e))

    # FeedParser (used by Parser) does not raise any exceptions. The returned
    # message may have .defects populated, but for backwards-compatibility we
    # currently ignore them.
    return Parser().parsestr(wheel_text)
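# Illustrative usage of wheel_metadata() (assumes `zf` is an open ZipFile for
# a wheel and "example-1.0.dist-info" is its metadata directory; values are
# made up). The return value is an email.message.Message, so headers can be
# read by key:
#
#     msg = wheel_metadata(zf, "example-1.0.dist-info")
#     wheel_version = msg["Wheel-Version"]    # e.g. "1.0"
#     root_is_purelib = msg["Root-Is-Purelib"]  # e.g. "true"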
def _normalized_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
    """Normalize the given rows of a RECORD file.

    Items in each row are converted into str. Rows are then sorted to make
    the value more predictable for tests.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details). For the rows
    passed to this function, the size can be an integer as an int or string,
    or the empty string.
    """
    # Normally, there should only be one row per path, in which case the
    # second and third elements don't come into play when sorting.
    # However, in cases in the wild where a path might happen to occur twice,
    # we don't want the sort operation to trigger an error (but still want
    # determinism). Since the third element can be an int or string, we
    # coerce each element to a string to avoid a TypeError in this case.
    # For additional background, see--
    # https://github.com/pypa/pip/issues/5868
    return sorted(
        (ensure_str(record_path, encoding='utf-8'), hash_, str(size))
        for record_path, hash_, size in outrows
    )
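# Illustrative example of the normalization above (made-up rows and hashes):
#
#     _normalized_outrows([
#         ("pkg/b.py", "sha256=bbb", 42),
#         ("pkg/a.py", "sha256=aaa", ""),
#     ])
#     # -> [("pkg/a.py", "sha256=aaa", ""), ("pkg/b.py", "sha256=bbb", "42")]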
def _install_wheel(
    name,  # type: str
    wheel_zip,  # type: ZipFile
    wheel_path,  # type: str
    scheme,  # type: Scheme
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
    requested=False,  # type: bool
):
    # type: (...) -> None
    """Install a wheel.

    :param name: Name of the project to install
    :param wheel_zip: open ZipFile for wheel being installed
    :param scheme: Distutils scheme dictating the install directories
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    :raises UnsupportedWheel:
        * when the directory holds an unpacked wheel with incompatible
          Wheel-Version
        * when the .dist-info dir does not match the wheel
    """
    info_dir, metadata = parse_wheel(wheel_zip, name)

    if wheel_root_is_purelib(metadata):
        lib_dir = scheme.purelib
    else:
        lib_dir = scheme.platlib

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[RecordPath, RecordPath]
    changed = set()  # type: Set[RecordPath]
    generated = []  # type: List[str]

    def record_installed(srcfile, destfile, modified=False):
        # type: (RecordPath, str, bool) -> None
        """Map archive RECORD paths to installation RECORD paths."""
        newpath = _fs_to_record_path(destfile, lib_dir)
        installed[srcfile] = newpath
        if modified:
            changed.add(_fs_to_record_path(destfile))

    def all_paths():
        # type: () -> Iterable[RecordPath]
        names = wheel_zip.namelist()
        # If a flag is set, names may be unicode in Python 2. We convert to
        # text explicitly so these are valid for lookup in RECORD.
        decoded_names = map(ensure_text, names)
        for name in decoded_names:
            yield cast("RecordPath", name)

    def is_dir_path(path):
        # type: (RecordPath) -> bool
        return path.endswith("/")

    def assert_no_path_traversal(dest_dir_path, target_path):
        # type: (str, str) -> None
        if not is_within_directory(dest_dir_path, target_path):
            message = (
                "The wheel {!r} has a file {!r} trying to install"
                " outside the target directory {!r}"
            )
            raise InstallationError(
                message.format(wheel_path, target_path, dest_dir_path)
            )

    def root_scheme_file_maker(zip_file, dest):
        # type: (ZipFile, str) -> Callable[[RecordPath], File]
        def make_root_scheme_file(record_path):
            # type: (RecordPath) -> File
            normed_path = os.path.normpath(record_path)
            dest_path = os.path.join(dest, normed_path)
            assert_no_path_traversal(dest, dest_path)
            return ZipBackedFile(record_path, dest_path, zip_file)

        return make_root_scheme_file

    def data_scheme_file_maker(zip_file, scheme):
        # type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
        scheme_paths = {}
        for key in SCHEME_KEYS:
            encoded_key = ensure_text(key)
            scheme_paths[encoded_key] = ensure_text(
                getattr(scheme, key), encoding=sys.getfilesystemencoding()
            )

        def make_data_scheme_file(record_path):
            # type: (RecordPath) -> File
            normed_path = os.path.normpath(record_path)
            try:
                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
            except ValueError:
                message = (
                    "Unexpected file in {}: {!r}. .data directory contents"
                    " should be named like: '<scheme key>/<path>'."
                ).format(wheel_path, record_path)
                raise InstallationError(message)

            try:
                scheme_path = scheme_paths[scheme_key]
            except KeyError:
                valid_scheme_keys = ", ".join(sorted(scheme_paths))
                message = (
                    "Unknown scheme key used in {}: {} (for file {!r}). .data"
                    " directory contents should be in subdirectories named"
                    " with a valid scheme key ({})"
                ).format(
                    wheel_path, scheme_key, record_path, valid_scheme_keys
                )
                raise InstallationError(message)

            dest_path = os.path.join(scheme_path, dest_subpath)
            assert_no_path_traversal(scheme_path, dest_path)
            return ZipBackedFile(record_path, dest_path, zip_file)

        return make_data_scheme_file

    def is_data_scheme_path(path):
        # type: (RecordPath) -> bool
        return path.split("/", 1)[0].endswith(".data")

    paths = all_paths()
    file_paths = filterfalse(is_dir_path, paths)
    root_scheme_paths, data_scheme_paths = partition(
        is_data_scheme_path, file_paths
    )

    make_root_scheme_file = root_scheme_file_maker(
        wheel_zip,
        ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
    )
    files = map(make_root_scheme_file, root_scheme_paths)

    def is_script_scheme_path(path):
        # type: (RecordPath) -> bool
        parts = path.split("/", 2)
        return (
            len(parts) > 2
            and parts[0].endswith(".data")
            and parts[1] == "scripts"
        )

    other_scheme_paths, script_scheme_paths = partition(
        is_script_scheme_path, data_scheme_paths
    )

    make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
    other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
    files = chain(files, other_scheme_files)

    # Get the defined entry points
    distribution = get_wheel_distribution(wheel_path, canonicalize_name(name))
    console, gui = get_entrypoints(distribution)

    def is_entrypoint_wrapper(file):
        # type: (File) -> bool
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        path = file.dest_path
        name = os.path.basename(path)
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
    script_scheme_files = filterfalse(
        is_entrypoint_wrapper, script_scheme_files
    )
    script_scheme_files = map(ScriptFile, script_scheme_files)
    files = chain(files, script_scheme_files)

    for file in files:
        file.save()
        record_installed(file.src_record_path, file.dest_path, file.changed)

    def pyc_source_file_paths():
        # type: () -> Iterator[str]
        # We de-duplicate installation paths, since there can be overlap (e.g.
        # file in .data maps to same location as file in wheel root).
        # Sorting installation paths makes it easier to reproduce and debug
        # issues related to permissions on existing files.
        for installed_path in sorted(set(installed.values())):
            full_installed_path = os.path.join(lib_dir, installed_path)
            if not os.path.isfile(full_installed_path):
                continue
            if not full_installed_path.endswith('.py'):
                continue
            yield full_installed_path

    def pyc_output_path(path):
        # type: (str) -> str
        """Return the path the pyc file would have been written to."""
        return importlib.util.cache_from_source(path)

    # Compile all of the pyc files for the installed files
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                for path in pyc_source_file_paths():
                    # Python 2's `compileall.compile_file` requires a str in
                    # error cases, so we must convert to the native type.
                    path_arg = ensure_str(
                        path, encoding=sys.getfilesystemencoding()
                    )
                    success = compileall.compile_file(
                        path_arg, force=True, quiet=True
                    )
                    if success:
                        pyc_path = pyc_output_path(path)
                        assert os.path.exists(pyc_path)
                        pyc_record_path = cast(
                            "RecordPath", pyc_path.replace(os.path.sep, "/")
                        )
                        record_installed(pyc_record_path, pyc_path)
        logger.debug(stdout.getvalue())

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate = get_console_script_specs(console)

    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))

    generated_console_scripts = maker.make_multiple(scripts_to_generate)
    generated.extend(generated_console_scripts)

    generated.extend(
        maker.make_multiple(gui_scripts_to_generate, {'gui': True})
    )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 & ~current_umask()

    @contextlib.contextmanager
    def _generate_file(path, **kwargs):
        # type: (str, **Any) -> Iterator[BinaryIO]
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
        os.chmod(f.name, generated_file_mode)
        replace(f.name, path)

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b'pip\n')
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record the REQUESTED file
    if requested:
        requested_path = os.path.join(dest_info_dir, 'REQUESTED')
        with open(requested_path, "wb"):
            pass
        generated.append(requested_path)

    record_text = distribution.read_text('RECORD')
    record_rows = list(csv.reader(record_text.splitlines()))

    rows = get_csv_rows_for_installed(
        record_rows,
        installed=installed,
        changed=changed,
        generated=generated,
        lib_dir=lib_dir)

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, 'RECORD')

    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
        # The type mypy infers for record_file is different for Python 3
        # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
        # cast to typing.IO[str] as a workaround.
        writer = csv.writer(cast('IO[str]', record_file))
        writer.writerows(_normalized_outrows(rows))
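# Illustrative call of _install_wheel() (hypothetical paths and a prebuilt
# Scheme; in pip, callers normally go through the module's higher-level
# install_wheel() wrapper rather than invoking this private function
# directly):
#
#     with ZipFile(wheel_path) as wheel_zip:
#         _install_wheel(
#             name="example",
#             wheel_zip=wheel_zip,
#             wheel_path=wheel_path,
#             scheme=scheme,  # provides purelib/platlib/scripts/... dirs
#             requested=True,
#         )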