def _get_module_code(self, fullname):
    """Locate *fullname* in the archive and return (code, ispackage, modpath).

    Candidates are tried in _zip_searchorder; stale or invalid bytecode
    entries are skipped.  Raises ZipImportError when no candidate matches.
    """
    base = _get_module_path(self, fullname)
    for suffix, isbytecode, ispackage in _zip_searchorder:
        candidate = base + suffix
        _bootstrap._verbose_message('trying {}{}{}', self.archive, path_sep,
                                    candidate, verbosity=2)
        toc_entry = self._files.get(candidate)
        if toc_entry is None:
            continue
        modpath = toc_entry[0]
        raw = _get_data(self.archive, toc_entry)
        if isbytecode:
            code = _unmarshal_code(self, modpath, candidate, fullname, raw)
        else:
            code = _compile_source(modpath, raw)
        if code is None:
            # Bad magic number or non-matching mtime in byte code;
            # fall through to the next suffix.
            continue
        return code, ispackage, modpath
    raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
def _unmarshal_code(pathname, data, mtime):
    """Unmarshal a code object from raw pyc *data*.

    Returns None — signalling the caller to try an alternative — on bad
    magic, a refused hash-based pyc, or a timestamp mismatch against
    *mtime*.  Raises ZipImportError for truncated data and TypeError if
    the payload is not a code object.
    """
    if len(data) < 16:
        raise ZipImportError('bad pyc data')

    if data[:4] != _bootstrap_external.MAGIC_NUMBER:
        _bootstrap._verbose_message('{!r} has bad magic', pathname)
        return None  # signal caller to try alternative

    flags = _unpack_uint32(data[4:8])
    if flags:
        # Hash-based pyc.  We currently refuse to handle checked
        # hash-based pycs.  We could validate hash-based pycs against
        # the source, but it seems likely that most people putting
        # hash-based pycs in a zipfile will use unchecked ones.
        policy = _imp.check_hash_based_pycs
        if policy != 'never' and (flags != 0x1 or policy == 'always'):
            return None
    elif mtime != 0 and not _eq_mtime(_unpack_uint32(data[8:12]), mtime):
        _bootstrap._verbose_message('{!r} has bad mtime', pathname)
        return None  # signal caller to try alternative

    # The pyc's size field is ignored; timestamp collisions are probably
    # unimportant with zip files.
    code = marshal.loads(data[16:])
    if not isinstance(code, _code_type):
        raise TypeError(f'compiled module {pathname!r} is not a code object')
    return code
def _unmarshal_code(self, pathname, fullpath, fullname, data):
    """Unmarshal a code object from pyc *data*, validating it first.

    Returns None when the bytecode is stale or fails validation, which
    tells the caller to try the next candidate; raises TypeError if the
    marshalled payload is not a code object.
    """
    exc_details = {
        'name': fullname,
        'path': fullpath,
    }

    try:
        flags = _bootstrap_external._classify_pyc(data, fullname, exc_details)
    except ImportError:
        return None

    hash_based = flags & 0b1 != 0
    if hash_based:
        check_source = flags & 0b10 != 0
        if (_imp.check_hash_based_pycs != 'never' and
                (check_source or _imp.check_hash_based_pycs == 'always')):
            source_bytes = _get_pyc_source(self, fullpath)
            if source_bytes is not None:
                source_hash = _imp.source_hash(
                    _bootstrap_external._RAW_MAGIC_NUMBER,
                    source_bytes,
                )

                try:
                    # BUG FIX: this was misspelled '_boostrap_external',
                    # which raised NameError whenever a checked
                    # hash-based pyc was actually validated.
                    _bootstrap_external._validate_hash_pyc(
                        data, source_hash, fullname, exc_details)
                except ImportError:
                    return None
    else:
        source_mtime, source_size = \
            _get_mtime_and_size_of_source(self, fullpath)

        if source_mtime:
            # We don't use _bootstrap_external._validate_timestamp_pyc
            # to allow for a more lenient timestamp check.
            if (not _eq_mtime(_unpack_uint32(data[8:12]), source_mtime) or
                    _unpack_uint32(data[12:16]) != source_size):
                _bootstrap._verbose_message(
                    f'bytecode is stale for {fullname!r}')
                return None

    code = marshal.loads(data[16:])
    if not isinstance(code, _code_type):
        raise TypeError(f'compiled module {pathname!r} is not a code object')
    return code
def _unmarshal_code(self, pathname, fullpath, fullname, data):
    """Unmarshal a code object from pyc *data*, validating it first.

    Returns None when the bytecode is stale or fails validation, which
    tells the caller to try the next candidate; raises TypeError if the
    marshalled payload is not a code object.
    """
    exc_details = {
        'name': fullname,
        'path': fullpath,
    }

    try:
        flags = _bootstrap_external._classify_pyc(data, fullname, exc_details)
    except ImportError:
        return None

    hash_based = flags & 0b1 != 0
    if hash_based:
        check_source = flags & 0b10 != 0
        if (_imp.check_hash_based_pycs != 'never' and
                (check_source or _imp.check_hash_based_pycs == 'always')):
            source_bytes = _get_pyc_source(self, fullpath)
            if source_bytes is not None:
                source_hash = _imp.source_hash(
                    _bootstrap_external._RAW_MAGIC_NUMBER,
                    source_bytes,
                )

                try:
                    # BUG FIX: '_boostrap_external' was a typo for
                    # '_bootstrap_external' and raised NameError on the
                    # checked hash-based pyc path.
                    _bootstrap_external._validate_hash_pyc(
                        data, source_hash, fullname, exc_details)
                except ImportError:
                    return None
    else:
        source_mtime, source_size = \
            _get_mtime_and_size_of_source(self, fullpath)

        if source_mtime:
            # We don't use _bootstrap_external._validate_timestamp_pyc
            # to allow for a more lenient timestamp check.
            if (not _eq_mtime(_unpack_uint32(data[8:12]), source_mtime) or
                    _unpack_uint32(data[12:16]) != source_size):
                _bootstrap._verbose_message(
                    f'bytecode is stale for {fullname!r}')
                return None

    code = marshal.loads(data[16:])
    if not isinstance(code, _code_type):
        raise TypeError(f'compiled module {pathname!r} is not a code object')
    return code
def load_module(self, fullname):
    """load_module(fullname) -> module.

    Load the module specified by 'fullname'. 'fullname' must be the
    fully qualified (dotted) module name. It returns the imported
    module, or raises ZipImportError if it could not be imported.

    Deprecated since Python 3.10. Use exec_module() instead.
    """
    msg = ("zipimport.zipimporter.load_module() is deprecated and slated for "
           "removal in Python 3.12; use exec_module() instead")
    _warnings.warn(msg, DeprecationWarning)

    code, ispackage, modpath = _get_module_code(self, fullname)
    module = sys.modules.get(fullname)
    if not isinstance(module, _module_type):
        # Covers both "not present" (None) and a non-module placeholder.
        module = _module_type(fullname)
        sys.modules[fullname] = module
    module.__loader__ = self

    try:
        if ispackage:
            # __path__ must exist *before* the code runs so that imports
            # within the package can resolve.
            subpath = _get_module_path(self, fullname)
            pkg_dir = _bootstrap_external._path_join(self.archive, subpath)
            module.__path__ = [pkg_dir]
        if not hasattr(module, '__builtins__'):
            module.__builtins__ = __builtins__
        _bootstrap_external._fix_up_module(module.__dict__, fullname, modpath)
        exec(code, module.__dict__)
    except:
        # Remove the partially initialized module before re-raising.
        del sys.modules[fullname]
        raise

    try:
        module = sys.modules[fullname]
    except KeyError:
        raise ImportError(
            f'Loaded module {fullname!r} not found in sys.modules')
    _bootstrap._verbose_message('import {} # loaded from Zip {}',
                                fullname, modpath)
    return module
def load_module(self, fullname):
    """load_module(fullname) -> module.

    Load the module specified by 'fullname'. 'fullname' must be the
    fully qualified (dotted) module name. Returns the imported module,
    or raises ZipImportError if it wasn't found.
    """
    code, ispackage, modpath = _get_module_code(self, fullname)
    mod = sys.modules.get(fullname)
    if not isinstance(mod, _module_type):
        # Either absent (None) or replaced by a non-module object.
        mod = _module_type(fullname)
        sys.modules[fullname] = mod
    mod.__loader__ = self

    try:
        if ispackage:
            # Set __path__ before executing the code so that relative
            # imports inside the package work.
            subpath = _get_module_path(self, fullname)
            mod.__path__ = [_bootstrap_external._path_join(self.archive,
                                                           subpath)]
        if not hasattr(mod, '__builtins__'):
            mod.__builtins__ = __builtins__
        _bootstrap_external._fix_up_module(mod.__dict__, fullname, modpath)
        exec(code, mod.__dict__)
    except:
        # Don't leave a half-initialized module behind.
        del sys.modules[fullname]
        raise

    try:
        mod = sys.modules[fullname]
    except KeyError:
        raise ImportError(
            f'Loaded module {fullname!r} not found in sys.modules')
    _bootstrap._verbose_message('import {} # loaded from Zip {}',
                                fullname, modpath)
    return mod
def _get_module_code(self, fullname):
    """Search the archive TOC for *fullname*.

    Tries each suffix in _zip_searchorder and returns the first match as
    (code, ispackage, modpath); raises ZipImportError when none match.
    """
    path = _get_module_path(self, fullname)
    for suffix, isbytecode, ispackage in _zip_searchorder:
        fullpath = path + suffix
        _bootstrap._verbose_message('trying {}{}{}', self.archive, path_sep,
                                    fullpath, verbosity=2)
        if fullpath not in self._files:
            continue
        toc_entry = self._files[fullpath]
        modpath = toc_entry[0]
        data = _get_data(self.archive, toc_entry)
        code = (_unmarshal_code(self, modpath, fullpath, fullname, data)
                if isbytecode else _compile_source(modpath, data))
        if code is not None:
            return code, ispackage, modpath
        # None means bad magic or a non-matching mtime in the byte code;
        # keep looking at the remaining suffixes.
    raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
def load_module(self, fullname):
    """load_module(fullname) -> module.

    Load the module specified by 'fullname'. 'fullname' must be the
    fully qualified (dotted) module name. It returns the imported
    module, or raises ZipImportError if it wasn't found.
    """
    code, ispackage, modpath = _get_module_code(self, fullname)

    existing = sys.modules.get(fullname)
    if isinstance(existing, _module_type):
        mod = existing
    else:
        mod = _module_type(fullname)
        sys.modules[fullname] = mod
    mod.__loader__ = self

    try:
        if ispackage:
            # add __path__ to the module *before* the code gets executed
            pkg_path = _bootstrap_external._path_join(
                self.archive, _get_module_path(self, fullname))
            mod.__path__ = [pkg_path]
        if not hasattr(mod, '__builtins__'):
            mod.__builtins__ = __builtins__
        _bootstrap_external._fix_up_module(mod.__dict__, fullname, modpath)
        exec(code, mod.__dict__)
    except:
        del sys.modules[fullname]
        raise

    try:
        mod = sys.modules[fullname]
    except KeyError:
        raise ImportError(f'Loaded module {fullname!r} not found in sys.modules')
    _bootstrap._verbose_message('import {} # loaded from Zip {}',
                                fullname, modpath)
    return mod
def _get_decompress_func():
    """Return zlib.decompress, guarding against recursive import of zlib.

    If the archive itself contains a zlib module, importing zlib would
    re-enter zipimport; the _importing_zlib flag breaks that recursion.
    """
    global _importing_zlib
    if _importing_zlib:
        # Someone has a zlib.py[co] in their Zip file; avoid a stack
        # overflow from the recursive import.
        _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
        raise ZipImportError("can't decompress data; zlib not available")

    _importing_zlib = True
    try:
        try:
            from zlib import decompress
        except Exception:
            _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
            raise ZipImportError("can't decompress data; zlib not available")
    finally:
        _importing_zlib = False

    _bootstrap._verbose_message('zipimport: zlib available')
    return decompress
def _get_decompress_func():
    """Return zlib.decompress, refusing to recurse into zipimport.

    A zlib.py[co] bundled inside the archive would otherwise trigger an
    unbounded recursive import; the module-level _importing_zlib flag
    detects and breaks that cycle.
    """
    global _importing_zlib

    def _unavailable():
        # Build (don't raise) the failure so both exit paths share it.
        _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
        return ZipImportError("can't decompress data; zlib not available")

    if _importing_zlib:
        raise _unavailable()

    _importing_zlib = True
    try:
        from zlib import decompress
    except Exception:
        raise _unavailable()
    finally:
        _importing_zlib = False

    _bootstrap._verbose_message('zipimport: zlib available')
    return decompress
def _read_directory(archive):
    """Read the Zip archive's central directory.

    Returns a dict mapping archive-relative file names to TOC entries:
    (path, compress, data_size, file_size, file_offset, time, date, crc).
    Raises ZipImportError if the archive cannot be read or is not a
    valid Zip file.

    BUG FIX: the EOFError message literal was split across two physical
    lines ('EOF read where not \\n expected'), leaving the source with an
    unterminated string; the literal is reconstructed here.
    """
    try:
        fp = _io.open_code(archive)
    except OSError:
        raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive)

    with fp:
        # First try the common case: no archive comment, so the
        # end-of-central-directory record is the last 22 bytes.
        try:
            fp.seek(-END_CENTRAL_DIR_SIZE, 2)
            header_position = fp.tell()
            buffer = fp.read(END_CENTRAL_DIR_SIZE)
        except OSError:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        if len(buffer) != END_CENTRAL_DIR_SIZE:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        if buffer[:4] != STRING_END_ARCHIVE:
            # Bad: End of Central Dir signature
            # Check if there's a comment.
            try:
                fp.seek(0, 2)
                file_size = fp.tell()
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            max_comment_start = max(
                file_size - MAX_COMMENT_LEN - END_CENTRAL_DIR_SIZE, 0)
            try:
                fp.seek(max_comment_start)
                data = fp.read()
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            pos = data.rfind(STRING_END_ARCHIVE)
            if pos < 0:
                raise ZipImportError(f'not a Zip file: {archive!r}',
                                     path=archive)
            buffer = data[pos:pos + END_CENTRAL_DIR_SIZE]
            if len(buffer) != END_CENTRAL_DIR_SIZE:
                raise ZipImportError(f"corrupt Zip file: {archive!r}",
                                     path=archive)
            header_position = file_size - len(data) + pos

        header_size = _unpack_uint32(buffer[12:16])
        header_offset = _unpack_uint32(buffer[16:20])
        if header_position < header_size:
            raise ZipImportError(f'bad central directory size: {archive!r}',
                                 path=archive)
        if header_position < header_offset:
            raise ZipImportError(f'bad central directory offset: {archive!r}',
                                 path=archive)
        header_position -= header_size
        # Allow for data prepended to the archive ("concatenated" zips).
        arc_offset = header_position - header_offset
        if arc_offset < 0:
            raise ZipImportError(
                f'bad central directory size or offset: {archive!r}',
                path=archive)

        files = {}
        # Start of Central Directory
        count = 0
        try:
            fp.seek(header_position)
        except OSError:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        while True:
            buffer = fp.read(46)
            if len(buffer) < 4:
                raise EOFError('EOF read where not expected')
            # Start of file header
            if buffer[:4] != b'PK\x01\x02':
                break  # Bad: Central Dir File Header
            if len(buffer) != 46:
                raise EOFError('EOF read where not expected')
            flags = _unpack_uint16(buffer[8:10])
            compress = _unpack_uint16(buffer[10:12])
            time = _unpack_uint16(buffer[12:14])
            date = _unpack_uint16(buffer[14:16])
            crc = _unpack_uint32(buffer[16:20])
            data_size = _unpack_uint32(buffer[20:24])
            file_size = _unpack_uint32(buffer[24:28])
            name_size = _unpack_uint16(buffer[28:30])
            extra_size = _unpack_uint16(buffer[30:32])
            comment_size = _unpack_uint16(buffer[32:34])
            file_offset = _unpack_uint32(buffer[42:46])
            header_size = name_size + extra_size + comment_size
            if file_offset > header_offset:
                raise ZipImportError(f'bad local header offset: {archive!r}',
                                     path=archive)
            file_offset += arc_offset

            try:
                name = fp.read(name_size)
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            if len(name) != name_size:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            # On Windows, calling fseek to skip over the fields we don't
            # use is slower than reading the data because fseek flushes
            # stdio's internal buffers.  See issue #8745.
            try:
                if len(fp.read(header_size - name_size)) != header_size - name_size:
                    raise ZipImportError(f"can't read Zip file: {archive!r}",
                                         path=archive)
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)

            if flags & 0x800:
                # UTF-8 file names extension
                name = name.decode()
            else:
                # Historical ZIP filename encoding
                try:
                    name = name.decode('ascii')
                except UnicodeDecodeError:
                    name = name.decode('latin1').translate(cp437_table)

            name = name.replace('/', path_sep)
            path = _bootstrap_external._path_join(archive, name)
            t = (path, compress, data_size, file_size, file_offset,
                 time, date, crc)
            files[name] = t
            count += 1
    _bootstrap._verbose_message('zipimport: found {} names in {!r}',
                                count, archive)
    return files
def exec_module(self, module):
    """Execute (initialize) an already-created extension module."""
    _call_with_frames_removed(_imp.exec_dynamic, module)
    # BUG FIX: the message was a plain string missing the 'f' prefix, so
    # '{self.name!r}' was handed to str.format() with no arguments and
    # failed at log time (compare create_module(), which uses f'...').
    _verbose_message(
        f'extension module {self.name!r} executed from {self.path!r}')
def create_module(self, spec):
    """Create an (uninitialized) extension module for *spec*."""
    mod = _call_with_frames_removed(_imp.create_dynamic, spec)
    _verbose_message(
        f'extension module {spec.name!r} loaded from {self.path!r}')
    return mod
def _read_directory(archive):
    """Read the Zip archive's central directory.

    Returns a dict mapping archive-relative file names to TOC entries:
    (path, compress, data_size, file_size, file_offset, time, date, crc).
    Raises ZipImportError if the archive cannot be read or is not a
    valid Zip file.

    BUG FIX: the EOFError message literal was split across two physical
    lines ('EOF read where not \\n expected'), leaving the source with an
    unterminated string; the literal is reconstructed here.
    """
    try:
        fp = _io.open(archive, 'rb')
    except OSError:
        raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive)

    with fp:
        # First assume there is no archive comment, i.e. the
        # end-of-central-directory record is the file's last 22 bytes.
        try:
            fp.seek(-END_CENTRAL_DIR_SIZE, 2)
            header_position = fp.tell()
            buffer = fp.read(END_CENTRAL_DIR_SIZE)
        except OSError:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        if len(buffer) != END_CENTRAL_DIR_SIZE:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        if buffer[:4] != STRING_END_ARCHIVE:
            # Bad: End of Central Dir signature
            # Check if there's a comment.
            try:
                fp.seek(0, 2)
                file_size = fp.tell()
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            max_comment_start = max(
                file_size - MAX_COMMENT_LEN - END_CENTRAL_DIR_SIZE, 0)
            try:
                fp.seek(max_comment_start)
                data = fp.read()
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            pos = data.rfind(STRING_END_ARCHIVE)
            if pos < 0:
                raise ZipImportError(f'not a Zip file: {archive!r}',
                                     path=archive)
            buffer = data[pos:pos + END_CENTRAL_DIR_SIZE]
            if len(buffer) != END_CENTRAL_DIR_SIZE:
                raise ZipImportError(f"corrupt Zip file: {archive!r}",
                                     path=archive)
            header_position = file_size - len(data) + pos

        header_size = _unpack_uint32(buffer[12:16])
        header_offset = _unpack_uint32(buffer[16:20])
        if header_position < header_size:
            raise ZipImportError(f'bad central directory size: {archive!r}',
                                 path=archive)
        if header_position < header_offset:
            raise ZipImportError(f'bad central directory offset: {archive!r}',
                                 path=archive)
        header_position -= header_size
        # Allow for data prepended to the archive ("concatenated" zips).
        arc_offset = header_position - header_offset
        if arc_offset < 0:
            raise ZipImportError(
                f'bad central directory size or offset: {archive!r}',
                path=archive)

        files = {}
        # Start of Central Directory
        count = 0
        try:
            fp.seek(header_position)
        except OSError:
            raise ZipImportError(f"can't read Zip file: {archive!r}",
                                 path=archive)
        while True:
            buffer = fp.read(46)
            if len(buffer) < 4:
                raise EOFError('EOF read where not expected')
            # Start of file header
            if buffer[:4] != b'PK\x01\x02':
                break  # Bad: Central Dir File Header
            if len(buffer) != 46:
                raise EOFError('EOF read where not expected')
            flags = _unpack_uint16(buffer[8:10])
            compress = _unpack_uint16(buffer[10:12])
            time = _unpack_uint16(buffer[12:14])
            date = _unpack_uint16(buffer[14:16])
            crc = _unpack_uint32(buffer[16:20])
            data_size = _unpack_uint32(buffer[20:24])
            file_size = _unpack_uint32(buffer[24:28])
            name_size = _unpack_uint16(buffer[28:30])
            extra_size = _unpack_uint16(buffer[30:32])
            comment_size = _unpack_uint16(buffer[32:34])
            file_offset = _unpack_uint32(buffer[42:46])
            header_size = name_size + extra_size + comment_size
            if file_offset > header_offset:
                raise ZipImportError(f'bad local header offset: {archive!r}',
                                     path=archive)
            file_offset += arc_offset

            try:
                name = fp.read(name_size)
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            if len(name) != name_size:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)
            # On Windows, calling fseek to skip over the fields we don't
            # use is slower than reading the data because fseek flushes
            # stdio's internal buffers.  See issue #8745.
            try:
                if len(fp.read(header_size - name_size)) != header_size - name_size:
                    raise ZipImportError(f"can't read Zip file: {archive!r}",
                                         path=archive)
            except OSError:
                raise ZipImportError(f"can't read Zip file: {archive!r}",
                                     path=archive)

            if flags & 0x800:
                # UTF-8 file names extension
                name = name.decode()
            else:
                # Historical ZIP filename encoding
                try:
                    name = name.decode('ascii')
                except UnicodeDecodeError:
                    name = name.decode('latin1').translate(cp437_table)

            name = name.replace('/', path_sep)
            path = _bootstrap_external._path_join(archive, name)
            t = (path, compress, data_size, file_size, file_offset,
                 time, date, crc)
            files[name] = t
            count += 1
    _bootstrap._verbose_message('zipimport: found {} names in {!r}',
                                count, archive)
    return files