def consider_add_dependency(self, target_path, dep, search_path=None):
    """Considers adding a dependency library.  Returns the target_path if
    it was added, which may be different from target_path if it was
    already added earlier, or None if it wasn't."""
    # A dependency we have seen before resolves to whatever we decided
    # for it the first time around.
    if dep in self.dep_paths:
        return self.dep_paths[dep]

    # Record a tentative "rejected" decision up front; it is overwritten
    # with the real target path at the end if the library is accepted.
    self.dep_paths[dep] = None

    if dep in self.ignore_deps:
        if GetVerbose():
            print("Ignoring {0} (explicitly ignored)".format(dep))
        return

    # Python itself is never bundled, except on Android.
    looks_like_python = (dep.lower().startswith("python") or
                         os.path.basename(dep).startswith("libpython"))
    if looks_like_python and not self.platform.startswith("android"):
        if GetVerbose():
            print("Ignoring {0} (explicitly ignored)".format(dep))
        return

    if self.platform.startswith("macosx"):
        # Temporary hack for 1.9, which had link deps on modules.
        if dep.endswith(".so"):
            return
        # System frameworks/libraries are always present; don't bundle.
        if dep.startswith("/System/"):
            return

    if dep.startswith('/'):
        # Absolute reference: take it as-is, no searching.
        resolved = dep
    else:
        # Relative reference: locate it on the search path.
        resolved = None
        directories = self.lib_path if search_path is None else search_path
        for directory in directories:
            # Ignore static stuff.
            candidate = os.path.join(directory, dep)
            if os.path.isfile(candidate):
                resolved = os.path.normpath(candidate)
                break

    if not resolved:
        # Couldn't find library in the panda3d lib dir.
        if GetVerbose():
            print("Ignoring {0} (not in search path)".format(dep))
        return

    self.dep_paths[dep] = target_path
    self.write_file(target_path, resolved)
    return target_path
def write_file_data(self, target_path, source_data):
    """Adds the given file from a string (or bytes object).

    The data is hashed and its entry is appended to self.records in
    PEP 376 RECORD format (path,sha256=digest,size) before being written
    into the zip archive.
    """
    # Encode exactly once, so the hash, the recorded size and the stored
    # zip data all describe the same bytes.  (Previously the size was
    # len() of the *str*, i.e. a character count, which disagrees with
    # the UTF-8 byte count for non-ASCII data.)
    if isinstance(source_data, str):
        data = source_data.encode()
    else:
        data = source_data
    sha = hashlib.sha256(data)
    # PEP 376 wants an urlsafe-base64 digest without '=' padding.
    digest = urlsafe_b64encode(sha.digest()).decode('ascii')
    digest = digest.rstrip('=')
    self.records.append("{0},sha256={1},{2}\n".format(target_path, digest,
                                                      len(data)))
    if GetVerbose():
        print("Adding %s from data" % target_path)
    self.zip_file.writestr(target_path, data)
def write_file_data(self, target_path, source_data):
    """Adds the given file from a string (or bytes object).

    A reproducible ZipInfo is constructed (clamped date, fixed
    permissions) and the entry is recorded in PEP 376 RECORD format.
    """
    # Encode exactly once, so the hash, the recorded size and the stored
    # zip data all describe the same bytes.  (Previously the size was
    # len() of the *str*, i.e. a character count, which disagrees with
    # the UTF-8 byte count for non-ASCII data.)
    if isinstance(source_data, str):
        data = source_data.encode()
    else:
        data = source_data
    sha = hashlib.sha256(data)
    # PEP 376 wants an urlsafe-base64 digest without '=' padding.
    digest = urlsafe_b64encode(sha.digest()).decode('ascii')
    digest = digest.rstrip('=')
    self.records.append("{0},sha256={1},{2}\n".format(
        target_path, digest, len(data)))
    if GetVerbose():
        print("Adding %s from data" % target_path)

    # Use an explicit ZipInfo so the timestamp is deterministic and the
    # file mode (0o600, stored in the upper 16 bits) is fixed.
    zinfo = zipfile.ZipInfo(filename=target_path,
                            date_time=self.max_date_time)
    zinfo.compress_type = self.zip_file.compression
    zinfo.external_attr = 0o600 << 16
    self.zip_file.writestr(zinfo, data)
def write_file(self, target_path, source_path):
    """Adds the given file to the .whl file.

    Shared libraries and executables get their dependencies scanned and
    added recursively (via consider_add_dependency), and are rewritten in
    a temporary copy so that dependency references resolve next to the
    file inside the wheel.  The entry is also appended to self.records in
    PEP 376 RECORD format (path,sha256=digest,size).
    """
    # Remember the original path purely for the verbose log message;
    # source_path may be swapped for a temp copy below.
    orig_source_path = source_path

    # If this is a .so file, we should set the rpath appropriately.
    temp = None
    basename, ext = os.path.splitext(source_path)
    if ext in ('.so', '.dylib') or '.so.' in os.path.basename(source_path) or \
       (not ext and is_executable(source_path)):
        # Scan Unix dependencies.
        if target_path not in IGNORE_UNIX_DEPS_OF:
            deps = scan_dependencies(source_path)
        else:
            deps = []

        # Preserve a recognizable suffix on the temp copy so the tools
        # below treat it as the right kind of binary.
        suffix = ''
        if '.so' in os.path.basename(source_path):
            suffix = '.so'
        elif ext == '.dylib':
            suffix = '.dylib'
        # delete=False: the temp file outlives this handle; we pass its
        # name to external tools and to zip_file.write at the end.
        temp = tempfile.NamedTemporaryFile(suffix=suffix, prefix='whl',
                                           delete=False)

        # On macOS, if no fat wheel was requested, extract the right architecture.
        if sys.platform == "darwin" and is_fat_file(source_path) \
           and not self.platform.endswith("_intel") \
           and "_fat" not in self.platform:
            if self.platform.endswith("_x86_64"):
                arch = 'x86_64'
            else:
                arch = self.platform.split('_')[-1]
            subprocess.call([
                'lipo', source_path, '-extract', arch, '-output', temp.name
            ])
        else:
            # Otherwise, just copy it over.
            temp.write(open(source_path, 'rb').read())
        temp.close()
        # Ensure the copy is executable (0o711 added on top of the
        # tempfile's default private mode).
        os.chmod(temp.name, os.stat(temp.name).st_mode | 0o711)

        # Now add dependencies. On macOS, fix @loader_path references.
        if sys.platform == "darwin":
            if source_path.endswith('deploy-stubw'):
                # App-bundle stub: libs live in the Frameworks directory.
                deps_path = '@executable_path/../Frameworks'
            else:
                deps_path = '@loader_path'
            # Resolve @loader_path-relative deps next to the source file.
            loader_path = [os.path.dirname(source_path)]
            for dep in deps:
                if dep.endswith('/Python'):
                    # If this references the Python framework, change it
                    # to reference libpython instead.
                    new_dep = deps_path + '/libpython{0}.{1}.dylib'.format(
                        *sys.version_info)
                elif '@loader_path' in dep:
                    dep_path = dep.replace('@loader_path', '.')
                    target_dep = os.path.dirname(
                        target_path) + '/' + os.path.basename(dep)
                    target_dep = self.consider_add_dependency(
                        target_dep, dep_path, loader_path)
                    if not target_dep:
                        # It won't be included, so no use adjusting the path.
                        continue
                    new_dep = os.path.join(
                        deps_path,
                        os.path.relpath(target_dep,
                                        os.path.dirname(target_path)))
                elif dep.startswith(
                        '/Library/Frameworks/Python.framework/'):
                    # Add this dependency if it's in the Python directory.
                    target_dep = os.path.dirname(
                        target_path) + '/' + os.path.basename(dep)
                    target_dep = self.consider_add_dependency(
                        target_dep, dep, loader_path)
                    if not target_dep:
                        # It won't be included, so no use adjusting the path.
                        continue
                    new_dep = os.path.join(
                        deps_path,
                        os.path.relpath(target_dep,
                                        os.path.dirname(target_path)))
                else:
                    if '/' in dep:
                        if GetVerbose():
                            print("Ignoring dependency %s" % (dep))
                        continue
                # NOTE(review): if dep has no '/' and matched none of the
                # branches above, new_dep is unbound (or stale from a
                # previous iteration) here — confirm such deps cannot
                # occur on macOS, or guard before this call.
                subprocess.call([
                    "install_name_tool", "-change", dep, new_dep, temp.name
                ])
        else:
            # On other unixes, we just add dependencies normally.
            for dep in deps:
                # Only include dependencies with relative path, for now.
                if '/' not in dep:
                    target_dep = os.path.dirname(target_path) + '/' + dep
                    self.consider_add_dependency(target_dep, dep)

            # Strip symbols, then force dependencies to be looked up
            # relative to the library's own location inside the wheel.
            subprocess.call(["strip", "-s", temp.name])
            subprocess.call(
                ["patchelf", "--set-rpath", "$ORIGIN", temp.name])

        # From here on, hash and store the patched temp copy.
        source_path = temp.name

    ext = ext.lower()
    if ext in ('.dll', '.pyd', '.exe'):
        # Scan and add Win32 dependencies.
        for dep in scan_dependencies(source_path):
            target_dep = os.path.dirname(target_path) + '/' + dep
            self.consider_add_dependency(target_dep, dep)

    # Calculate the SHA-256 hash and size.
    sha = hashlib.sha256()
    fp = open(source_path, 'rb')
    size = 0
    data = fp.read(1024 * 1024)
    while data:
        size += len(data)
        sha.update(data)
        data = fp.read(1024 * 1024)
    fp.close()

    # Save it in PEP-0376 format for writing out later.
    digest = urlsafe_b64encode(sha.digest()).decode('ascii')
    digest = digest.rstrip('=')
    self.records.append("{0},sha256={1},{2}\n".format(
        target_path, digest, size))

    if GetVerbose():
        print("Adding {0} from {1}".format(target_path, orig_source_path))
    self.zip_file.write(source_path, target_path)
def write_file(self, target_path, source_path):
    """Adds the given file to the .whl file.

    If the file is a shared library or executable, its dependencies are
    scanned and added as well, and a patched temporary copy is stored so
    that dependency references resolve next to the file inside the wheel.
    The entry is appended to self.records in PEP 376 RECORD format.
    """
    # If this is a .so file, we should set the rpath appropriately.
    temp = None
    ext = os.path.splitext(source_path)[1]
    if ext in ('.so', '.dylib') or '.so.' in os.path.basename(source_path) or \
       (not ext and is_executable(source_path)):
        # Scan and add Unix dependencies.
        deps = scan_dependencies(source_path)
        for dep in deps:
            # Only include dependencies with relative path. Otherwise we
            # end up overwriting system files like /lib/ld-linux.so.2!
            # Yes, it happened to me.
            if '/' not in dep:
                target_dep = os.path.dirname(target_path) + '/' + dep
                self.consider_add_dependency(target_dep, dep)

        # Preserve a recognizable suffix on the temp copy so the tools
        # below treat it as the right kind of binary.
        suffix = ''
        if '.so' in os.path.basename(source_path):
            suffix = '.so'
        elif ext == '.dylib':
            suffix = '.dylib'
        # delete=False: the temp file outlives this handle; external
        # tools and zip_file.write use it by name below.
        temp = tempfile.NamedTemporaryFile(suffix=suffix, prefix='whl',
                                           delete=False)

        # On macOS, if no fat wheel was requested, extract the right architecture.
        if sys.platform == "darwin" and is_fat_file(
                source_path) and not self.platform.endswith("_intel"):
            if self.platform.endswith("_x86_64"):
                arch = 'x86_64'
            else:
                arch = self.platform.split('_')[-1]
            subprocess.call([
                'lipo', source_path, '-extract', arch, '-output', temp.name
            ])
        else:
            # Otherwise, just copy it over.  (Fixed: the contents were
            # previously written twice, doubling the file; also close the
            # source handle deterministically.)
            with open(source_path, 'rb') as source_fp:
                temp.write(source_fp.read())

        # Make the copy executable before releasing the descriptor.
        os.fchmod(temp.fileno(), os.fstat(temp.fileno()).st_mode | 0o111)
        temp.close()

        # Fix things like @loader_path/../lib references
        if sys.platform == "darwin":
            loader_path = [os.path.dirname(source_path)]
            for dep in deps:
                if '@loader_path' not in dep:
                    continue
                dep_path = dep.replace('@loader_path', '.')
                target_dep = os.path.dirname(
                    target_path) + '/' + os.path.basename(dep)
                target_dep = self.consider_add_dependency(
                    target_dep, dep_path, loader_path)
                if not target_dep:
                    # It won't be included, so no use adjusting the path.
                    continue
                new_dep = os.path.join(
                    '@loader_path',
                    os.path.relpath(target_dep,
                                    os.path.dirname(target_path)))
                subprocess.call([
                    "install_name_tool", "-change", dep, new_dep, temp.name
                ])
        else:
            # Strip symbols, then force dependencies to be looked up
            # relative to the library's own location inside the wheel.
            subprocess.call(["strip", "-s", temp.name])
            subprocess.call(
                ["patchelf", "--set-rpath", "$ORIGIN", temp.name])

        # From here on, hash and store the patched temp copy.
        source_path = temp.name

    ext = ext.lower()
    if ext in ('.dll', '.pyd', '.exe'):
        # Scan and add Win32 dependencies.
        for dep in scan_dependencies(source_path):
            target_dep = os.path.dirname(target_path) + '/' + dep
            self.consider_add_dependency(target_dep, dep)

    # Calculate the SHA-256 hash and size.
    sha = hashlib.sha256()
    size = 0
    with open(source_path, 'rb') as fp:
        data = fp.read(1024 * 1024)
        while data:
            size += len(data)
            sha.update(data)
            data = fp.read(1024 * 1024)

    # Save it in PEP-0376 format for writing out later.
    # Fixed: str(urlsafe_b64encode(...)) produced "b'...'" on Python 3,
    # so the digest was wrong and the '=' padding was never stripped;
    # decode to ASCII instead (matches write_file_data).
    digest = urlsafe_b64encode(sha.digest()).decode('ascii')
    digest = digest.rstrip('=')
    self.records.append("{0},sha256={1},{2}\n".format(
        target_path, digest, size))

    if GetVerbose():
        print("Adding %s from %s" % (target_path, source_path))
    self.zip_file.write(source_path, target_path)
def write_file(self, target_path, source_path):
    """Adds the given file to the .whl file.

    Shared libraries and executables get their dependencies scanned and
    added recursively, and are rewritten in a temporary copy (patchelf /
    install_name_tool) so that dependency references resolve next to the
    file inside the wheel.  The data is streamed into the zip while the
    SHA-256 is computed, and the entry is appended to self.records in
    PEP 376 RECORD format (path,sha256=digest,size).
    """
    # Remember the original path purely for the verbose log message;
    # source_path may be swapped for a temp copy below.
    orig_source_path = source_path

    # If this is a .so file, we should set the rpath appropriately.
    temp = None
    basename, ext = os.path.splitext(source_path)
    if ext in ('.so', '.dylib') or '.so.' in os.path.basename(source_path) or \
       (not ext and is_executable(source_path)):
        # Scan Unix dependencies.
        if target_path not in IGNORE_UNIX_DEPS_OF:
            deps = scan_dependencies(source_path)
        else:
            deps = []

        # Preserve a recognizable suffix on the temp copy so the tools
        # below treat it as the right kind of binary.
        suffix = ''
        if '.so' in os.path.basename(source_path):
            suffix = '.so'
        elif ext == '.dylib':
            suffix = '.dylib'
        # delete=False: the temp file outlives this handle; external
        # tools and the zip copy below use it by name.
        temp = tempfile.NamedTemporaryFile(suffix=suffix, prefix='whl',
                                           delete=False)

        # On macOS, if no fat wheel was requested, extract the right architecture.
        if self.platform.startswith("macosx") and is_fat_file(source_path) \
           and not self.platform.endswith("_intel") \
           and "_fat" not in self.platform \
           and "_universal" not in self.platform:
            if self.platform.endswith("_x86_64"):
                arch = 'x86_64'
            else:
                # e.g. macosx_11_0_arm64 -> arm64
                arch = self.platform.split('_')[-1]
            subprocess.call([
                'lipo', source_path, '-extract', arch, '-output', temp.name
            ])
        else:
            # Otherwise, just copy it over.
            temp.write(open(source_path, 'rb').read())
        temp.close()
        # Ensure the copy is executable (0o711 added on top of the
        # tempfile's default private mode).
        os.chmod(temp.name, os.stat(temp.name).st_mode | 0o711)

        # Now add dependencies. On macOS, fix @loader_path references.
        if self.platform.startswith("macosx"):
            if source_path.endswith('deploy-stubw'):
                # App-bundle stub: libs live in the Frameworks directory.
                deps_path = '@executable_path/../Frameworks'
            else:
                deps_path = '@loader_path'
            # Resolve @loader_path-relative deps next to the source file.
            loader_path = [os.path.dirname(source_path)]
            for dep in deps:
                if dep.endswith('/Python'):
                    # If this references the Python framework, change it
                    # to reference libpython instead.
                    new_dep = deps_path + '/libpython{0}.{1}.dylib'.format(
                        *sys.version_info)
                elif '@loader_path' in dep:
                    dep_path = dep.replace('@loader_path', '.')
                    target_dep = os.path.dirname(
                        target_path) + '/' + os.path.basename(dep)
                    target_dep = self.consider_add_dependency(
                        target_dep, dep_path, loader_path)
                    if not target_dep:
                        # It won't be included, so no use adjusting the path.
                        continue
                    new_dep = os.path.join(
                        deps_path,
                        os.path.relpath(target_dep,
                                        os.path.dirname(target_path)))
                elif '@rpath' in dep:
                    # Unlike makepanda, CMake uses @rpath instead of
                    # @loader_path. This means we can just search for the
                    # dependencies like normal.
                    dep_path = dep.replace('@rpath', '.')
                    target_dep = os.path.dirname(
                        target_path) + '/' + os.path.basename(dep)
                    self.consider_add_dependency(target_dep, dep_path)
                    # No install-name rewrite needed for @rpath deps.
                    continue
                elif dep.startswith(
                        '/Library/Frameworks/Python.framework/'):
                    # Add this dependency if it's in the Python directory.
                    target_dep = os.path.dirname(
                        target_path) + '/' + os.path.basename(dep)
                    target_dep = self.consider_add_dependency(
                        target_dep, dep, loader_path)
                    if not target_dep:
                        # It won't be included, so no use adjusting the path.
                        continue
                    new_dep = os.path.join(
                        deps_path,
                        os.path.relpath(target_dep,
                                        os.path.dirname(target_path)))
                else:
                    if '/' in dep:
                        if GetVerbose():
                            print("Ignoring dependency %s" % (dep))
                        continue
                # NOTE(review): if dep has no '/' and matched none of the
                # branches above, new_dep is unbound (or stale from a
                # previous iteration) here — confirm such deps cannot
                # occur on macOS, or guard before this call.
                subprocess.call([
                    "install_name_tool", "-change", dep, new_dep, temp.name
                ])

            # Make sure it has an ad-hoc code signature.
            subprocess.call(["codesign", "-f", "-s", "-", temp.name])
        else:
            # On other unixes, we just add dependencies normally.
            for dep in deps:
                # Only include dependencies with relative path, for now.
                if '/' in dep:
                    continue

                if self.platform.startswith('android') and '.so.' in dep:
                    # Change .so.1.2 suffix to .so, to allow loading in .apk
                    new_dep = dep.rpartition('.so.')[0] + '.so'
                    subprocess.call([
                        "patchelf", "--replace-needed", dep, new_dep,
                        temp.name
                    ])
                    target_dep = os.path.dirname(
                        target_path) + '/' + new_dep
                else:
                    target_dep = os.path.dirname(target_path) + '/' + dep
                self.consider_add_dependency(target_dep, dep)

            subprocess.call([GetStrip(), "-s", temp.name])

            if self.platform.startswith('android'):
                # We must link explicitly with Python, because the usual
                # -rdynamic trick doesn't work from a shared library loaded
                # through ANativeActivity.
                if suffix == '.so' and not os.path.basename(
                        source_path).startswith('lib'):
                    pylib_name = "libpython" + get_config_var(
                        'LDVERSION') + ".so"
                    subprocess.call([
                        "patchelf", "--add-needed", pylib_name, temp.name
                    ])
            else:
                # On other systems, we use the rpath to force it to locate
                # dependencies in the same directory.
                subprocess.call([
                    "patchelf", "--force-rpath", "--set-rpath", "$ORIGIN",
                    temp.name
                ])

        # From here on, hash and store the patched temp copy.
        source_path = temp.name

    ext = ext.lower()
    if ext in ('.dll', '.pyd', '.exe'):
        # Scan and add Win32 dependencies.
        for dep in scan_dependencies(source_path):
            target_dep = os.path.dirname(target_path) + '/' + dep
            self.consider_add_dependency(target_dep, dep)

    if GetVerbose():
        print("Adding {0} from {1}".format(target_path, orig_source_path))

    # Build the zip entry header from the real file, but clamp its
    # timestamp for reproducible output.
    zinfo = zipfile.ZipInfo.from_file(source_path, target_path)
    zinfo.compress_type = self.zip_file.compression
    if zinfo.date_time > self.max_date_time:
        zinfo.date_time = self.max_date_time

    # Copy the data to the zip file, while also calculating the SHA-256.
    size = 0
    sha = hashlib.sha256()
    with open(source_path, 'rb') as source_fp, \
         self.zip_file.open(zinfo, 'w') as target_fp:
        data = source_fp.read(1024 * 1024)
        while data:
            size += len(data)
            target_fp.write(data)
            sha.update(data)
            data = source_fp.read(1024 * 1024)

    # Save it in PEP-0376 format for writing out later.
    digest = urlsafe_b64encode(sha.digest()).decode('ascii')
    digest = digest.rstrip('=')
    self.records.append("{0},sha256={1},{2}\n".format(
        target_path, digest, size))