def _getImports_ldd(pth):
    """
    Find the binary dependencies of PTH.

    This implementation is for ldd platforms (mostly unix).

    Returns a set of filesystem paths of the shared libraries that PTH
    links against, as reported by ``ldd``.
    """
    rslt = set()
    if is_aix:
        # Match libs of the form 'archive.a(sharedobject.so)'
        # Will not match the fake lib '/unix'
        lddPattern = re.compile(r"\s*(.*?)(\(.*\))")
    else:
        lddPattern = re.compile(r"\s*(.*?)\s+=>\s+(.*?)\s+\(.*\)")

    for line in compat.exec_command("ldd", pth).splitlines():
        m = lddPattern.search(line)
        if m:
            if is_aix:
                lib = m.group(1)
                name = os.path.basename(lib) + m.group(2)
            else:
                name, lib = m.group(1), m.group(2)
            if name[:10] in ("linux-gate", "linux-vdso"):
                # linux-gate is a fake library which does not exist and
                # should be ignored. See also:
                # http://www.trilithium.com/johan/2005/08/linux-gate/
                continue

            if os.path.exists(lib):
                # set.add() is already a no-op for duplicates, so the
                # previous 'if lib not in rslt' membership test was redundant.
                rslt.add(lib)
            else:
                logger.error("Can not find %s in path %s (needed by %s)",
                             name, lib, pth)
    return rslt
def test_UPX(config, upx_dir):
    """
    Probe for the UPX executable packer and record the result.

    :param config: build configuration dict; 'hasUPX' (version tuple or 0)
                   and 'upx_dir' are stored into it.
    :param upx_dir: optional directory containing the upx executable.
    """
    logger.debug('Testing for UPX ...')
    cmd = "upx"
    if upx_dir:
        cmd = os.path.normpath(os.path.join(upx_dir, cmd))

    hasUPX = 0
    try:
        vers = compat.exec_command(
            cmd, '-V', __raise_ENOENT__=True).strip().splitlines()
        if vers:
            v = vers[0].split()[1]
            # Strip a possible suffix, e.g. "3.96-git-d7ba31cab8ce" -> "3.96".
            # str.split() cannot raise, so the former try/except guard around
            # this line was dead code and has been removed.
            v = v.split("-")[0]
            hasUPX = tuple(map(int, v.split(".")))
            if is_win and hasUPX < (1, 92):
                logger.error('UPX is too old! Python 2.4 under Windows requires UPX 1.92+')
                hasUPX = 0
    except Exception as e:
        if isinstance(e, OSError) and e.errno == 2:
            # No such file or directory: UPX is simply not installed.
            pass
        else:
            # Typo fix in log message: 'occured' -> 'occurred'.
            logger.info('An exception occurred when testing for UPX:')
            logger.info(' %r', e)
    if hasUPX:
        is_available = 'available'
    else:
        is_available = 'not available'
    logger.info('UPX is %s.', is_available)
    config['hasUPX'] = hasUPX
    config['upx_dir'] = upx_dir
def get_repo_revision():
    '''
    Returns the git revision string like `git rev-parse --short HEAD` does.

    Returns an empty string if anything goes wrong, such as missing .git
    files, an unexpected format of internal git files, or no git
    repository found.  (Docstring fixed: the previous text wrongly
    referred to Mercurial/.hg, but the code reads git metadata.)
    '''
    repopath = _findrepo()
    if not repopath:
        return ''
    try:
        # HEAD either contains a 'ref: <path>' line pointing at a branch
        # file, or a bare commit hash (detached HEAD).
        with open(os.path.join(repopath, 'HEAD'), 'rU') as fp:
            head = fp.read()
        for l in head.splitlines():
            l = l.split()
            # Guard against blank lines, which would make l[0] raise.
            if l and l[0] == 'ref:':
                ref = l[1]
                break
        else:
            ref = None
        if ref:
            with open(os.path.join(repopath, ref), 'rU') as fp:
                rev = fp.read()
            rev = rev[:7]
            if rev:
                return rev
    except IOError:
        pass
    try:
        # Fall back to asking git itself.
        rev = compat.exec_command('git', 'rev-parse', '--short', 'HEAD').strip()
        if rev:
            return rev
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt.
        pass
    return ''
def get_repo_revision():
    """
    Return a PEP440 local version identifier ('+<rev>' or '+<rev>.mod')
    describing the current git checkout, or '' when not applicable.
    """
    path = os.path  # shortcut
    gitdir = path.normpath(path.join(path.dirname(os.path.abspath(__file__)), '..', '..', '.git'))
    cwd = os.path.dirname(gitdir)
    if not path.exists(gitdir):
        try:
            from PyInstaller.utils._gitrevision import rev
            if not rev.startswith('$'):
                # the format specifier has been substituted
                return '+' + rev
        except ImportError:
            pass
        return ''
    try:
        # need to update index first to get reliable state
        exec_command_rc('git', 'update-index', '-q', '--refresh', cwd=cwd)
        recent = exec_command('git', 'describe', '--long', '--dirty', '--tag', cwd=cwd).strip()
        if recent.endswith('-dirty'):
            tag, changes, rev, dirty = recent.rsplit('-', 3)
            rev = rev + '.mod'
        else:
            tag, changes, rev = recent.rsplit('-', 2)
        if changes == '0':
            return ''
        # According to PEP440, local version identifier starts with '+'.
        return '+' + rev
    except OSError:
        # Be silent when the git command is not found. OSError covers both
        # FileNotFoundError and WindowsError; naming WindowsError directly
        # (as before) raises NameError on non-Windows platforms the moment
        # the except clause is evaluated.
        pass
    return ''
def test_UPX(config, upx_dir):
    """Probe for the UPX packer and store the outcome in *config*."""
    logger.debug('Testing for UPX ...')
    cmd = "upx"
    if upx_dir:
        cmd = os.path.normpath(os.path.join(upx_dir, cmd))

    hasUPX = 0
    try:
        version_lines = compat.exec_command(cmd, '-V').strip().splitlines()
        if version_lines:
            version_token = version_lines[0].split()[1]
            hasUPX = tuple(map(int, version_token.split(".")))
            if is_win and hasUPX < (1, 92):
                logger.error('UPX is too old! Python 2.4 under Windows requires UPX 1.92+')
                hasUPX = 0
    except Exception as e:
        if isinstance(e, OSError) and e.errno == 2:
            # No such file or directory
            pass
        else:
            logger.info('An exception occured when testing for UPX:')
            logger.info(' %r', e)
    is_available = 'available' if hasUPX else 'not available'
    logger.info('UPX is %s.', is_available)
    config['hasUPX'] = hasUPX
    config['upx_dir'] = upx_dir
def get_repo_revision():
    """
    Return the abbreviated git revision of HEAD, or '' on any failure.
    """
    try:
        rev = compat.exec_command('git', 'rev-parse', '--short', 'HEAD').strip()
        if rev:
            return rev
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt; Exception is broad enough for
        # "git missing or not a repository".
        pass
    return ''
def getSoname(filename):
    """
    Return the soname of a library, or None when objdump reports none.
    """
    objdump_output = compat.exec_command(
        "objdump", "-p", "-j", ".dynamic", filename)
    match = re.search(r'\s+SONAME\s+([^\s]+)', objdump_output)
    return match.group(1) if match else None
def get_repo_revision():
    """
    Return the abbreviated git revision of HEAD, or "" on any failure.
    """
    try:
        rev = compat.exec_command("git", "rev-parse", "--short", "HEAD").strip()
        if rev:
            return rev
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt.
        pass
    return ""
def _getImports_ldd(pth):
    """
    Find the binary dependencies of PTH.

    This implementation is for ldd platforms (mostly unix).

    Returns a set of library paths that PTH links against.
    """
    rslt = set()
    if is_aix:
        # Match libs of the form
        #   'archivelib.a(objectmember.so/.o)'
        # or
        #   'sharedlib.so'
        # Will not match the fake lib '/unix'
        lddPattern = re.compile(
            r"^\s*(((?P<libarchive>(.*\.a))(?P<objectmember>\(.*\)))|((?P<libshared>(.*\.so))))$"
        )
    elif is_solar:
        # Match libs of the form
        #   'sharedlib.so => full-path-to-lib
        # e.g.
        #   'libpython2.7.so.1.0 => /usr/local/lib/libpython2.7.so.1.0'
        # Will not match the platform specific libs starting with '/platform'
        lddPattern = re.compile(r"^\s+(.*)\s+=>\s+(.*)$")
    else:
        lddPattern = re.compile(r"\s*(.*?)\s+=>\s+(.*?)\s+\(.*\)")

    for line in compat.exec_command('ldd', pth).splitlines():
        m = lddPattern.search(line)
        if m:
            if is_aix:
                libarchive = m.group('libarchive')
                if libarchive:
                    # We matched an archive lib with a request for a particular
                    # embedded shared object.
                    # 'archivelib.a(objectmember.so/.o)'
                    lib = libarchive
                    name = os.path.basename(lib) + m.group('objectmember')
                else:
                    # We matched a stand-alone shared library.
                    # 'sharedlib.so'
                    lib = m.group('libshared')
                    name = os.path.basename(lib)
            else:
                name, lib = m.group(1), m.group(2)
            if name[:10] in ('linux-gate', 'linux-vdso'):
                # linux-gate is a fake library which does not exist and
                # should be ignored. See also:
                # http://www.trilithium.com/johan/2005/08/linux-gate/
                continue

            if os.path.exists(lib):
                # set.add() already ignores duplicates; the former
                # 'if lib not in rslt' membership test was redundant.
                rslt.add(lib)
            else:
                logger.error('Can not find %s in path %s (needed by %s)',
                             name, lib, pth)
    return rslt
def sign_source_distribution(data):
    """
    Sign the tgz or zip archive that will be uploaded to PYPI.

    :param data:
    """
    print()
    # zest.releaser does a clean checkout where it generates tgz/zip in 'dist' directory and those files will be then
    # uploaded to pypi.
    dist_dir = os.path.join(data['tagdir'], 'dist')
    cmd = ['gpg', '--detach-sign', '--armor']
    # Read the signing identity once instead of querying the environment
    # three times.
    signing_id = getenv("PYINSTALLER_CODESIGNING_ID")
    if signing_id:
        print("Using gpg identity", signing_id, "for signing.")
        cmd.extend(['--local-user', signing_id])
    # Sign every file that ended up in the 'dist' directory.
    for entry in os.listdir(dist_dir):
        target = os.path.join(dist_dir, entry)
        print('Signing file %s' % target)
        exec_command(*cmd + [target])
def _getImports_ldd(pth):
    """
    Find the binary dependencies of PTH.

    This implementation is for ldd platforms (mostly unix).

    Returns a set of library paths that PTH links against.
    """
    rslt = set()
    if is_aix:
        # Match libs of the form
        #   'archivelib.a(objectmember.so/.o)'
        # or
        #   'sharedlib.so'
        # Will not match the fake lib '/unix'
        lddPattern = re.compile(r"^\s*(((?P<libarchive>(.*\.a))(?P<objectmember>\(.*\)))|((?P<libshared>(.*\.so))))$")
    elif is_solar:
        # Match libs of the form
        #   'sharedlib.so => full-path-to-lib
        # e.g.
        #   'libpython2.7.so.1.0 => /usr/local/lib/libpython2.7.so.1.0'
        # Will not match the platform specific libs starting with '/platform'
        lddPattern = re.compile(r"^\s+(.*)\s+=>\s+(.*)$")
    else:
        lddPattern = re.compile(r"\s*(.*?)\s+=>\s+(.*?)\s+\(.*\)")

    for line in compat.exec_command('ldd', pth).splitlines():
        m = lddPattern.search(line)
        if m:
            if is_aix:
                libarchive = m.group('libarchive')
                if libarchive:
                    # We matched an archive lib with a request for a particular
                    # embedded shared object.
                    # 'archivelib.a(objectmember.so/.o)'
                    lib = libarchive
                    name = os.path.basename(lib) + m.group('objectmember')
                else:
                    # We matched a stand-alone shared library.
                    # 'sharedlib.so'
                    lib = m.group('libshared')
                    name = os.path.basename(lib)
            else:
                name, lib = m.group(1), m.group(2)
            if name[:10] in ('linux-gate', 'linux-vdso'):
                # linux-gate is a fake library which does not exist and
                # should be ignored. See also:
                # http://www.trilithium.com/johan/2005/08/linux-gate/
                continue

            if os.path.exists(lib):
                # set.add() already ignores duplicates; the former
                # 'if lib not in rslt' membership test was redundant.
                rslt.add(lib)
            else:
                logger.error('Can not find %s in path %s (needed by %s)',
                             name, lib, pth)
    return rslt
def _get_so_name(filename): """ Return the soname of a library. Soname is usefull whene there are multiple symplinks to one library. """ # TODO verify that objdump works on other unixes and not Linux only. cmd = ["objdump", "-p", filename] m = re.search(r'\s+SONAME\s+([^\s]+)', compat.exec_command(*cmd)) return m.group(1)
def findLibrary(name):
    """
    Look for a library in the system.

    Emulate the algorithm used by dlopen. `name` must include the
    prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, "Current implementation for Unix only (Linux, Solaris, AIX)"

    def _first_match(directory):
        # First glob hit in the directory, or None.
        candidates = glob(os.path.join(directory, name + '*'))
        return candidates[0] if candidates else None

    lib = None

    # Step 1: the platform-specific library path environment variable.
    if is_aix:
        env_path = compat.getenv('LIBPATH', '')
    elif is_darwin:
        env_path = compat.getenv('DYLD_LIBRARY_PATH', '')
    else:
        env_path = compat.getenv('LD_LIBRARY_PATH', '')
    for directory in env_path.split(os.pathsep):
        lib = _first_match(directory)
        if lib:
            break

    # Step 2: the ldconfig cache (/etc/ld.so.cache).
    # TODO Look for ldconfig in /usr/sbin/ldconfig. /sbin is deprecated
    # in recent linux distributions.
    # Solaris does not have /sbin/ldconfig. Just check if this file exists.
    if lib is None and os.path.exists('/sbin/ldconfig'):
        expr = r'/[^\(\)\s]*%s\.[^\(\)\s]*' % re.escape(name)
        m = re.search(expr, compat.exec_command('/sbin/ldconfig', '-p'))
        if m:
            lib = m.group(0)

    # Step 3: the known safe system library directories.
    if lib is None:
        paths = ['/lib', '/usr/lib']
        if is_aix:
            paths.append('/opt/freeware/lib')
        for directory in paths:
            lib = _first_match(directory)
            if lib:
                break

    # give up :(
    if lib is None:
        return None

    # Resolve the file name into the soname
    return os.path.join(os.path.dirname(lib), getSoname(lib))
def qt5_qml_dir():
    """
    Return Qt5's QML directory as reported by ``qmake -query
    QT_INSTALL_QML``, normalized for the current platform.

    Returns an empty string when qmake cannot be run or reports nothing.
    """
    try:
        qmldir = compat.exec_command("qmake", "-query", "QT_INSTALL_QML")
    except IOError:
        qmldir = ''
    if len(qmldir) == 0:
        logger.error('Cannot find QT_INSTALL_QML directory, "qmake -query ' +
                     'QT_INSTALL_QML" returned nothing')
        # Bug fix: return the empty string unchanged. Previously the empty
        # string fell through to os.path.normpath(''), which returns '.',
        # so callers received '.' instead of '' on failure.
        return qmldir
    if not os.path.exists(qmldir):
        logger.error("Directory QT_INSTALL_QML: %s doesn't exist" % qmldir)
    # 'qmake -query' uses / as the path separator, even on Windows
    qmldir = os.path.normpath(qmldir)
    return qmldir
def test_logs(self):
    """
    Compare log files (now used only by multipackage test_name).

    Return True if .toc files match or when .toc patters are not defined.
    """
    # Collect the .toc files belonging to this test's main script(s).
    logsfn = glob.glob(self.test_file + '.toc')
    # Other main scripts do not start with 'test_'.
    logsfn += glob.glob(self.test_file.split('_', 1)[1] + '_?.toc')
    for logfn in logsfn:
        self._msg("EXECUTING MATCHING " + logfn)
        tmpname = os.path.splitext(logfn)[0]
        prog = self._find_exepath(tmpname)
        if prog is None:
            prog = self._find_exepath(tmpname, os.path.join('dist', self.test_file))
        # Dump the archive contents: inside a virtualenv the installed
        # console script is used, otherwise the helper script is run
        # through the interpreter.
        if _virtual_env_:
            fname_list = compat.exec_command(
                'pyi-archive_viewer', '-b', '-r', prog)
        else:
            fname_list = compat.exec_python(
                os.path.join(HOMEPATH, 'utils', 'archive_viewer.py'),
                '-b', '-r', prog)
        # Fix line-endings so eval() does not fail.
        fname_list = fname_list.replace('\r\n', '\n').replace('\n\r', '\n')
        # NOTE(review): eval() on tool output and on .toc file contents is
        # only acceptable because both are generated by this test suite —
        # never reuse this pattern with untrusted input.
        fname_list = eval(fname_list)
        pattern_list = eval(open(logfn, 'rU').read())
        # Alphabetical order of patterns.
        pattern_list.sort()
        count = 0
        for pattern in pattern_list:
            found = False
            for fname in fname_list:
                if re.match(pattern, fname):
                    count += 1
                    found = True
                    self._plain_msg('MATCH: %s --> %s' % (pattern, fname))
                    break
            if not found:
                self._plain_msg('MISSING: %s' % pattern)
        # Not all modules matched.
        # Stop comparing other .toc files and fail the test.
        if count < len(pattern_list):
            return False
    return True
def test_logs(self):
    """
    Compare log files (now used only by multipackage test_name).

    Return True if .toc files match or when .toc patters are not defined.
    """
    # Collect the .toc files belonging to this test's main script(s).
    logsfn = glob.glob(self.test_file + '.toc')
    # Other main scripts do not start with 'test_'.
    logsfn += glob.glob(self.test_file.split('_', 1)[1] + '_?.toc')
    for logfn in logsfn:
        self._msg("EXECUTING MATCHING " + logfn)
        tmpname = os.path.splitext(logfn)[0]
        prog = self._find_exepath(tmpname)
        if prog is None:
            prog = self._find_exepath(tmpname, os.path.join('dist', self.test_file))
        # Dump the archive contents: inside a virtualenv the installed
        # console script is used, otherwise the helper script is run
        # through the interpreter.
        if _virtual_env_:
            fname_list = compat.exec_command('pyi-archive_viewer', '-b', '-r', prog)
        else:
            fname_list = compat.exec_python(
                os.path.join(HOMEPATH, 'utils', 'archive_viewer.py'),
                '-b', '-r', prog)
        # Fix line-endings so eval() does not fail.
        fname_list = fname_list.replace('\r\n', '\n').replace('\n\r', '\n')
        # NOTE(review): eval() on tool output and on .toc file contents is
        # only acceptable because both are generated by this test suite —
        # never reuse this pattern with untrusted input.
        fname_list = eval(fname_list)
        pattern_list = eval(open(logfn, 'rU').read())
        # Alphabetical order of patterns.
        pattern_list.sort()
        count = 0
        for pattern in pattern_list:
            found = False
            for fname in fname_list:
                if re.match(pattern, fname):
                    count += 1
                    found = True
                    self._plain_msg('MATCH: %s --> %s' % (pattern, fname))
                    break
            if not found:
                self._plain_msg('MISSING: %s' % pattern)
        # Not all modules matched.
        # Stop comparing other .toc files and fail the test.
        if count < len(pattern_list):
            return False
    return True
def get_repo_revision():
    """
    Return the abbreviated git revision of HEAD, with a '-mod' suffix
    when the working tree has uncommitted changes; '' on any failure.
    """
    path = os.path  # shortcut
    gitdir = path.normpath(path.join(path.dirname(__file__), '..', '..', '.git'))
    if not path.exists(gitdir):
        return ''
    try:
        rev = compat.exec_command('git', 'rev-parse', '--short', 'HEAD').strip()
        if rev:
            # need to update index first to get reliable state
            compat.exec_command_rc('git', 'update-index', '-q', '--refresh')
            changed = compat.exec_command_rc('git', 'diff-index', '--quiet', 'HEAD')
            if changed:
                rev = rev + '-mod'
            return rev
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt.
        pass
    return ''
def findLibrary(name):
    """
    Look for a library in the system.

    Emulate the algorithm used by dlopen. `name` must include the
    prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, "Current implementation for Unix only (Linux, Solaris, AIX)"

    lib = None

    # Look in the LD_LIBRARY_PATH
    lp = compat.getenv('LD_LIBRARY_PATH', '')
    for path in lp.split(os.pathsep):
        libs = glob(os.path.join(path, name + '*'))
        if libs:
            lib = libs[0]
            break

    # Look in /etc/ld.so.cache.
    # Bug fix: guard against platforms without /sbin/ldconfig (e.g.
    # Solaris), where executing it unconditionally would raise; the other
    # findLibrary variants in this codebase already perform this check.
    if lib is None and os.path.exists('/sbin/ldconfig'):
        expr = r'/[^\(\)\s]*%s\.[^\(\)\s]*' % re.escape(name)
        m = re.search(expr, compat.exec_command('/sbin/ldconfig', '-p'))
        if m:
            lib = m.group(0)

    # Look in the known safe paths
    if lib is None:
        paths = ['/lib', '/usr/lib']
        if is_aix:
            paths.append('/opt/freeware/lib')
        for path in paths:
            libs = glob(os.path.join(path, name + '*'))
            if libs:
                lib = libs[0]
                break

    # give up :(
    if lib is None:
        return None

    # Resolve the file name into the soname
    dir = os.path.dirname(lib)
    return os.path.join(dir, getSoname(lib))
def test_UPX(config, upx_dir):
    """
    Probe for the UPX executable packer and log whether it is usable.
    """
    logger.info('testing for UPX...')
    cmd = "upx"
    if upx_dir:
        cmd = os.path.normpath(os.path.join(upx_dir, cmd))
    hasUPX = 0
    try:
        vers = compat.exec_command(cmd, '-V').strip().splitlines()
        if vers:
            v = vers[0].split()[1]
            hasUPX = tuple(map(int, v.split(".")))
            if is_win and is_py24 and hasUPX < (1, 92):
                logger.error('UPX is too old! Python 2.4 under Windows requires UPX 1.92+')
                hasUPX = 0
        logger.info('...UPX %s', ('unavailable', 'available')[hasUPX != 0])
    except Exception as e:
        # Bug fix: 'except Exception, e' is Python-2-only syntax and a
        # SyntaxError on Python 3; 'as' works on Python 2.6+ and 3.
        logger.info('...exception result in testing for UPX')
        logger.info(' %r %r', e, e.args)
def get_repo_revision():
    """
    Return the abbreviated git revision of HEAD, with a '-mod' suffix
    when the working tree has uncommitted changes; '' on any failure.
    """
    path = os.path  # shortcut
    gitdir = path.normpath(
        path.join(path.dirname(__file__), '..', '..', '.git'))
    if not path.exists(gitdir):
        return ''
    try:
        rev = compat.exec_command('git', 'rev-parse', '--short', 'HEAD').strip()
        if rev:
            # need to update index first to get reliable state
            compat.exec_command_rc('git', 'update-index', '-q', '--refresh')
            changed = compat.exec_command_rc('git', 'diff-index', '--quiet', 'HEAD')
            if changed:
                rev = rev + '-mod'
            return rev
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt.
        pass
    return ''
def test_UPX(config, upx_dir):
    """
    Probe for the UPX executable packer; sets hasUPX to a version tuple
    on success, 0 otherwise.
    """
    logger.debug('Testing for UPX ...')
    cmd = "upx"
    if upx_dir:
        cmd = os.path.normpath(os.path.join(upx_dir, cmd))
    hasUPX = 0
    try:
        vers = compat.exec_command(cmd, '-V').strip().splitlines()
        if vers:
            v = vers[0].split()[1]
            hasUPX = tuple(map(int, v.split(".")))
            if is_win and hasUPX < (1, 92):
                logger.error('UPX is too old! Python 2.4 under Windows requires UPX 1.92+')
                hasUPX = 0
    except Exception as e:
        # Bug fix: 'except Exception, e' is Python-2-only syntax and a
        # SyntaxError on Python 3; 'as' works on Python 2.6+ and 3.
        if isinstance(e, OSError) and e.errno == 2:
            # No such file or directory
            pass
        else:
            # Typo fix in log message: 'occured' -> 'occurred'.
            logger.info('An exception occurred when testing for UPX:')
            logger.info(' %r', e)
def test_UPX(config, upx_dir):
    """
    Probe for the UPX executable packer; sets hasUPX to a version tuple
    on success, 0 otherwise.
    """
    logger.debug("Testing for UPX ...")
    cmd = "upx"
    if upx_dir:
        cmd = os.path.normpath(os.path.join(upx_dir, cmd))
    hasUPX = 0
    try:
        vers = compat.exec_command(cmd, "-V").strip().splitlines()
        if vers:
            v = vers[0].split()[1]
            hasUPX = tuple(map(int, v.split(".")))
            if is_win and is_py24 and hasUPX < (1, 92):
                logger.error("UPX is too old! Python 2.4 under Windows requires UPX 1.92+")
                hasUPX = 0
    except Exception as e:
        # Bug fix: 'except Exception, e' is Python-2-only syntax and a
        # SyntaxError on Python 3; 'as' works on Python 2.6+ and 3.
        if isinstance(e, OSError) and e.errno == 2:
            # No such file or directory
            pass
        else:
            # Typo fix in log message: 'occured' -> 'occurred'.
            logger.info("An exception occurred when testing for UPX:")
            logger.info(" %r", e)
def _getImports_ldd(pth):
    """
    Find the binary dependencies of PTH.

    This implementation is for ldd platforms (mostly unix).

    Returns a set of library paths that PTH links against.
    """
    rslt = set()
    if is_aix:
        # Match libs of the form 'archive.a(sharedobject.so)'
        # Will not match the fake lib '/unix'
        lddPattern = re.compile(r"\s+(.*?)(\(.*\))")
    else:
        lddPattern = re.compile(r"\s+(.*?)\s+=>\s+(.*?)\s+\(.*\)")
    for line in compat.exec_command('ldd', pth).strip().splitlines():
        m = lddPattern.search(line)
        if m:
            if is_aix:
                lib = m.group(1)
                name = os.path.basename(lib) + m.group(2)
            else:
                name, lib = m.group(1), m.group(2)
            if name[:10] in ('linux-gate', 'linux-vdso'):
                # linux-gate is a fake library which does not exist and
                # should be ignored. See also:
                # http://www.trilithium.com/johan/2005/08/linux-gate/
                continue
            if os.path.exists(lib):
                # set.add() already ignores duplicates; the former
                # 'if lib not in rslt' membership test was redundant.
                rslt.add(lib)
            else:
                logger.error('Can not find %s in path %s (needed by %s)',
                             name, lib, pth)
    return rslt
def _getImports_ldd(pth):
    """
    Find the binary dependencies of PTH.

    This implementation is for ldd platforms (mostly unix).

    Returns a set of library paths that PTH links against; missing
    libraries are reported through dylib.warn_missing_lib.
    """
    rslt = set()
    if compat.is_aix:
        # Match libs of the form
        #   'archivelib.a(objectmember.so/.o)'
        # or
        #   'sharedlib.so'
        # Will not match the fake lib '/unix'
        lddPattern = re.compile(r"^\s*(((?P<libarchive>(.*\.a))(?P<objectmember>\(.*\)))|((?P<libshared>(.*\.so))))$")
    elif compat.is_hpux or compat.is_solar:
        # HP-UX and Solaris use the same format; the branches were merged
        # because they compiled the identical regex.
        # Match libs of the form
        #   'sharedlib.so => full-path-to-lib
        # e.g.
        #   'libpython2.7.so.1.0 => /usr/local/lib/libpython2.7.so.1.0'
        # On Solaris this will not match the platform specific libs
        # starting with '/platform'.
        lddPattern = re.compile(r"^\s+(.*)\s+=>\s+(.*)$")
    else:
        lddPattern = re.compile(r"\s*(.*?)\s+=>\s+(.*?)\s+\(.*\)")

    for line in compat.exec_command('ldd', pth).splitlines():
        m = lddPattern.search(line)
        if m:
            if compat.is_aix:
                libarchive = m.group('libarchive')
                if libarchive:
                    # We matched an archive lib with a request for a particular
                    # embedded shared object.
                    # 'archivelib.a(objectmember.so/.o)'
                    lib = libarchive
                    name = os.path.basename(lib) + m.group('objectmember')
                else:
                    # We matched a stand-alone shared library.
                    # 'sharedlib.so'
                    lib = m.group('libshared')
                    name = os.path.basename(lib)
            else:
                # HP-UX, Solaris and glibc formats all expose (name, path)
                # as the first two groups.
                name, lib = m.group(1), m.group(2)
            if name[:10] in ('linux-gate', 'linux-vdso'):
                # linux-gate is a fake library which does not exist and
                # should be ignored. See also:
                # http://www.trilithium.com/johan/2005/08/linux-gate/
                continue

            if compat.is_cygwin:
                # exclude Windows system library
                if lib.lower().startswith('/cygdrive/c/windows/system'):
                    continue

            if os.path.exists(lib):
                # set.add() already ignores duplicates; the former
                # 'if lib not in rslt' membership test was redundant.
                rslt.add(lib)
            elif dylib.warn_missing_lib(name):
                logger.warning('Cannot find %s in path %s (needed by %s)',
                               name, lib, pth)
        elif line.endswith("not found"):
            # On glibc-based linux distributions, missing libraries
            # are marked with name.so => not found
            tokens = line.split('=>')
            if len(tokens) != 2:
                continue
            name = tokens[0].strip()
            if dylib.warn_missing_lib(name):
                logger.warning('Cannot find %s (needed by %s)', name, pth)
    return rslt
"PyQt5.QtGui", "PyQt5.QtNetwork", "PyQt5.QtWebChannel", "PyQt5.QtWebEngineCore", ] # Find the additional files necessary for QtWebEngine. # Currently only implemented for OSX. # Note that for QtWebEngineProcess to be able to find icudtl.dat the bundle_identifier # must be set to 'org.qt-project.Qt.QtWebEngineCore'. This can be done by passing # bundle_identifier='org.qt-project.Qt.QtWebEngineCore' to the BUNDLE command in # the .spec file. FIXME: This is not ideal and a better solution is required. qmake = get_qmake_path('5') if qmake: libdir = compat.exec_command(qmake, "-query", "QT_INSTALL_LIBS").strip() if compat.is_darwin: binaries = [ (os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5',\ 'Helpers', 'QtWebEngineProcess.app', 'Contents', 'MacOS', 'QtWebEngineProcess'), os.path.join('QtWebEngineProcess.app', 'Contents', 'MacOS')) ] resources_dir = os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5', 'Resources') datas = [ (os.path.join(resources_dir, 'icudtl.dat'),''), (os.path.join(resources_dir, 'qtwebengine_resources.pak'), ''), # The distributed Info.plist has LSUIElement set to true, which prevents the # icon from appearing in the dock. (os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5',\
def findLibrary(name):
    """
    Look for a library in the system.

    Emulate the algorithm used by dlopen. `name` must include the
    prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, ("Current implementation for Unix only (Linux, Solaris, "
                     "AIX, FreeBSD)")

    def _glob_first(directory):
        # First glob hit in the directory, or None.
        hits = glob(os.path.join(directory, name + '*'))
        return hits[0] if hits else None

    lib = None

    # 1) The library search path from the environment, per platform.
    if is_aix:
        env_path = compat.getenv('LIBPATH', '')
    elif is_darwin:
        env_path = compat.getenv('DYLD_LIBRARY_PATH', '')
    else:
        env_path = compat.getenv('LD_LIBRARY_PATH', '')
    for directory in env_path.split(os.pathsep):
        lib = _glob_first(directory)
        if lib:
            break

    # 2) The ldconfig cache (/etc/ld.so.cache).
    # TODO Look for ldconfig in /usr/sbin/ldconfig. /sbin is deprecated
    # in recent linux distributions.
    # Solaris does not have /sbin/ldconfig. Just check if this file exists.
    if lib is None and os.path.exists('/sbin/ldconfig'):
        expr = r'/[^\(\)\s]*%s\.[^\(\)\s]*' % re.escape(name)
        if is_freebsd:
            # This has a slightly different format than on linux, but the
            # regex still works.
            ldconfig_out = compat.exec_command('/sbin/ldconfig', '-r')
        else:
            ldconfig_out = compat.exec_command('/sbin/ldconfig', '-p')
        m = re.search(expr, ldconfig_out)
        if m:
            lib = m.group(0)

    # 3) The known safe system library directories.
    if lib is None:
        paths = ['/lib', '/lib32', '/lib64',
                 '/usr/lib', '/usr/lib32', '/usr/lib64']
        # On Debian/Ubuntu /usr/bin/python is linked statically with libpython.
        # Newer Debian/Ubuntu with multiarch support puts the libpythonX.Y.so
        # in paths like /usr/lib/i386-linux-gnu/.
        try:
            import sysconfig  # Module available only in Python 2.7.
            arch_subdir = sysconfig.get_config_var('multiarchsubdir')
            # Ignore if None is returned.
            if arch_subdir:
                arch_subdir = os.path.basename(arch_subdir)
                paths.extend([
                    os.path.join('/usr/lib', arch_subdir),
                    os.path.join('/usr/lib32', arch_subdir),
                    os.path.join('/usr/lib64', arch_subdir),
                ])
        except ImportError:
            pass
        if is_aix:
            paths.append('/opt/freeware/lib')
        elif is_freebsd:
            paths.append('/usr/local/lib')
        for directory in paths:
            lib = _glob_first(directory)
            if lib:
                break

    # give up :(
    if lib is None:
        return None

    # Resolve the file name into the soname
    if is_freebsd:
        # On FreeBSD objdump doesn't show SONAME, so we just return the lib
        # we've found
        return lib
    return os.path.join(os.path.dirname(lib), getSoname(lib))
# Find the additional files necessary for QtWebEngine. datas = ( collect_data_files('PySide2', True, os.path.join('Qt', 'resources')) + collect_data_files('PySide2', True, os.path.join('Qt', 'translations')) + [ x for x in collect_data_files('PySide2', False, os.path.join( 'Qt', 'bin')) if x[0].endswith('QtWebEngineProcess.exe') ]) # Note that for QtWebEngineProcess to be able to find icudtl.dat the bundle_identifier # must be set to 'org.qt-project.Qt.QtWebEngineCore'. This can be done by passing # bundle_identifier='org.qt-project.Qt.QtWebEngineCore' to the BUNDLE command in # the .spec file. FIXME: This is not ideal and a better solution is required. qmake = get_qmake_path('5') if qmake: libdir = compat.exec_command(qmake, "-query", "QT_INSTALL_LIBS").strip() if compat.is_darwin: binaries = [(os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5', 'Helpers', 'QtWebEngineProcess.app', 'Contents', 'MacOS', 'QtWebEngineProcess'), os.path.join('QtWebEngineProcess.app', 'Contents', 'MacOS'))] resources_dir = os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5', 'Resources') datas += [ (os.path.join(resources_dir, 'icudtl.dat'), '.'), (os.path.join(resources_dir, 'qtwebengine_resources.pak'), '.'), # The distributed Info.plist has LSUIElement set to true, which prevents the
def findLibrary(name):
    """
    Look for a library in the system.

    Emulate the algorithm used by dlopen. `name` must include the
    prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, "Current implementation for Unix only (Linux, Solaris, AIX)"

    def _glob_first(directory):
        # First glob hit in the directory, or None.
        hits = glob(os.path.join(directory, name + '*'))
        return hits[0] if hits else None

    lib = None

    # 1) The library search path from the environment, per platform.
    if is_aix:
        env_path = compat.getenv('LIBPATH', '')
    elif is_darwin:
        env_path = compat.getenv('DYLD_LIBRARY_PATH', '')
    else:
        env_path = compat.getenv('LD_LIBRARY_PATH', '')
    for directory in env_path.split(os.pathsep):
        lib = _glob_first(directory)
        if lib:
            break

    # 2) The ldconfig cache (/etc/ld.so.cache).
    # TODO Look for ldconfig in /usr/sbin/ldconfig. /sbin is deprecated
    # in recent linux distributions.
    # Solaris does not have /sbin/ldconfig. Just check if this file exists.
    if lib is None and os.path.exists('/sbin/ldconfig'):
        expr = r'/[^\(\)\s]*%s\.[^\(\)\s]*' % re.escape(name)
        m = re.search(expr, compat.exec_command('/sbin/ldconfig', '-p'))
        if m:
            lib = m.group(0)

    # 3) The known safe system library directories.
    if lib is None:
        paths = ['/lib', '/lib32', '/lib64',
                 '/usr/lib', '/usr/lib32', '/usr/lib64']
        # On Debian/Ubuntu /usr/bin/python is linked statically with libpython.
        # Newer Debian/Ubuntu with multiarch support puts the libpythonX.Y.so
        # in paths like /usr/lib/i386-linux-gnu/.
        try:
            import sysconfig  # Module available only in Python 2.7.
            arch_subdir = sysconfig.get_config_var('multiarchsubdir')
            # Ignore if None is returned.
            if arch_subdir:
                arch_subdir = os.path.basename(arch_subdir)
                paths.extend([
                    os.path.join('/usr/lib', arch_subdir),
                    os.path.join('/usr/lib32', arch_subdir),
                    os.path.join('/usr/lib64', arch_subdir),
                ])
        except ImportError:
            pass
        if is_aix:
            paths.append('/opt/freeware/lib')
        for directory in paths:
            lib = _glob_first(directory)
            if lib:
                break

    # give up :(
    if lib is None:
        return None

    # Resolve the file name into the soname
    return os.path.join(os.path.dirname(lib), getSoname(lib))
def findLibrary(name):
    """
    Look for a library in the system.

    Emulate the algorithm used by dlopen. `name` must include the
    prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, "Current implementation for Unix only (Linux, Solaris, " "AIX, FreeBSD)"

    def _glob_first(directory):
        # First glob hit in the directory, or None.
        hits = glob(os.path.join(directory, name + "*"))
        return hits[0] if hits else None

    lib = None

    # 1) The library search path from the environment, per platform.
    if is_aix:
        env_path = compat.getenv("LIBPATH", "")
    elif is_darwin:
        env_path = compat.getenv("DYLD_LIBRARY_PATH", "")
    else:
        env_path = compat.getenv("LD_LIBRARY_PATH", "")
    for directory in env_path.split(os.pathsep):
        lib = _glob_first(directory)
        if lib:
            break

    # 2) The ldconfig cache (/etc/ld.so.cache).
    # TODO Look for ldconfig in /usr/sbin/ldconfig. /sbin is deprecated
    # in recent linux distributions.
    # Solaris does not have /sbin/ldconfig. Just check if this file exists.
    if lib is None and os.path.exists("/sbin/ldconfig"):
        expr = r"/[^\(\)\s]*%s\.[^\(\)\s]*" % re.escape(name)
        if is_freebsd:
            # This has a slightly different format than on linux, but the
            # regex still works.
            ldconfig_out = compat.exec_command("/sbin/ldconfig", "-r")
        else:
            ldconfig_out = compat.exec_command("/sbin/ldconfig", "-p")
        m = re.search(expr, ldconfig_out)
        if m:
            lib = m.group(0)

    # 3) The known safe system library directories.
    if lib is None:
        paths = ["/lib", "/lib32", "/lib64",
                 "/usr/lib", "/usr/lib32", "/usr/lib64"]
        # On Debian/Ubuntu /usr/bin/python is linked statically with libpython.
        # Newer Debian/Ubuntu with multiarch support puts the libpythonX.Y.so
        # in paths like /usr/lib/i386-linux-gnu/.
        try:
            import sysconfig  # Module available only in Python 2.7.
            arch_subdir = sysconfig.get_config_var("multiarchsubdir")
            # Ignore if None is returned.
            if arch_subdir:
                arch_subdir = os.path.basename(arch_subdir)
                paths.extend([
                    os.path.join("/usr/lib", arch_subdir),
                    os.path.join("/usr/lib32", arch_subdir),
                    os.path.join("/usr/lib64", arch_subdir),
                ])
        except ImportError:
            pass
        if is_aix:
            paths.append("/opt/freeware/lib")
        elif is_freebsd:
            paths.append("/usr/local/lib")
        for directory in paths:
            lib = _glob_first(directory)
            if lib:
                break

    # give up :(
    if lib is None:
        return None

    # Resolve the file name into the soname
    if is_freebsd:
        # On FreeBSD objdump doesn't show SONAME, so we just return the lib
        # we've found
        return lib
    return os.path.join(os.path.dirname(lib), getSoname(lib))
def checkCache(fnm, strip=False, upx=False, upx_exclude=None, dist_nm=None, target_arch=None, codesign_identity=None, entitlements_file=None):
    """
    Cache prevents preprocessing binary files again and again.

    Process the binary `fnm` (strip / UPX / Windows manifest rewriting / Mac OS header and signing fix-ups,
    as requested by the flags) and place the result in the bincache directory, returning the path of the
    cached copy. If an up-to-date cached copy already exists, return it without reprocessing.

    'fnm'       Path of the binary to process.
    'strip'     Run `strip` on the binary (only when `upx` is not applied).
    'upx'       Compress with UPX; only effective on Windows/Cygwin (see below).
    'upx_exclude'  Iterable of basenames that must never be UPX-compressed.
    'dist_nm'   Filename relative to dist directory. We need it on Mac to determine level of paths for @loader_path like
                '@loader_path/../../' for qt4 plugins.
    'target_arch', 'codesign_identity', 'entitlements_file'
                Mac OS specific: architecture thinning and code-signing parameters; they also key the cache directory.
    """
    from PyInstaller.config import CONF
    # On Mac OS, a cache is required anyway to keep the libraries with relative install names.
    # Caching on Mac OS does not work since we need to modify binary headers to use relative paths to dll depencies and
    # starting with '@loader_path'.
    if not strip and not upx and not is_darwin and not is_win:
        # Nothing to do on this platform with these flags -- use the file in place.
        return fnm
    if dist_nm is not None and ":" in dist_nm:
        # A file embedded in another PyInstaller build via multipackage.
        # No actual file exists to process.
        return fnm
    # Coerce `strip` from any truthy/falsy value to an actual bool (it is interpolated into the cache dir name below).
    if strip:
        strip = True
    else:
        strip = False
    upx_exclude = upx_exclude or []
    # UPX is applied only on Windows/Cygwin and only if the basename is not excluded.
    # NOTE(review): `fnm` is normcased for the comparison but `upx_exclude` entries are not -- a mixed-case
    # exclude entry would silently fail to match; confirm callers pass normalized names.
    upx = (upx and (is_win or is_cygwin) and os.path.normcase(os.path.basename(fnm)) not in upx_exclude)

    # Load cache index.
    # Make cachedir per Python major/minor version.
    # This allows parallel building of executables with different Python versions as one user.
    pyver = 'py%d%s' % (sys.version_info[0], sys.version_info[1])
    arch = platform.architecture()[0]
    # The strip/upx flags are baked into the directory name so differently-processed copies never collide.
    cachedir = os.path.join(CONF['cachedir'], 'bincache%d%d_%s_%s' % (strip, upx, pyver, arch))
    if target_arch:
        cachedir = os.path.join(cachedir, target_arch)
    if is_darwin:
        # Separate by codesign identity
        if codesign_identity:
            # Compute hex digest of codesign identity string to prevent issues with invalid characters.
            csi_hash = hashlib.sha256(codesign_identity.encode('utf-8'))
            cachedir = os.path.join(cachedir, csi_hash.hexdigest())
        else:
            cachedir = os.path.join(cachedir, 'adhoc')  # ad-hoc signing
        # Separate by entitlements
        if entitlements_file:
            # Compute hex digest of entitlements file contents
            with open(entitlements_file, 'rb') as fp:
                ef_hash = hashlib.sha256(fp.read())
            cachedir = os.path.join(cachedir, ef_hash.hexdigest())
        else:
            cachedir = os.path.join(cachedir, 'no-entitlements')
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    # The index maps cached basenames to content digests (see cacheDigest below).
    cacheindexfn = os.path.join(cachedir, "index.dat")
    if os.path.exists(cacheindexfn):
        try:
            cache_index = misc.load_py_data_struct(cacheindexfn)
        except Exception:
            # Tell the user they may want to fix their cache... However, do not delete it for them; if it keeps getting
            # corrupted, we will never find out.
            logger.warning(
                "PyInstaller bincache may be corrupted; use pyinstaller --clean to fix it."
            )
            raise
    else:
        cache_index = {}
    # Verify that the file we are looking for is present in the cache. Use the dist_nm if given to avoid different
    # extension modules sharing the same basename get corrupted.
    if dist_nm:
        basenm = os.path.normcase(dist_nm)
    else:
        basenm = os.path.normcase(os.path.basename(fnm))
    # Binding redirects should be taken into account to see if the file needs to be reprocessed. The redirects may
    # change if the versions of dependent manifests change due to system updates.
    redirects = CONF.get('binding_redirects', [])
    digest = cacheDigest(fnm, redirects)
    cachedfile = os.path.join(cachedir, basenm)
    cmd = None
    if basenm in cache_index:
        if digest != cache_index[basenm]:
            # Stale entry: source file (or redirects) changed since it was cached -- drop the cached copy.
            os.remove(cachedfile)
        else:
            # Cache hit: reuse the previously processed copy.
            return cachedfile
    # Optionally change manifest and its dependencies to private assemblies.
    if fnm.lower().endswith(".manifest"):
        manifest = winmanifest.Manifest()
        manifest.filename = fnm
        with open(fnm, "rb") as f:
            manifest.parse_string(f.read())
        if CONF.get('win_private_assemblies', False):
            if manifest.publicKeyToken:
                logger.info("Changing %s into private assembly", os.path.basename(fnm))
            manifest.publicKeyToken = None
            for dep in manifest.dependentAssemblies:
                # Exclude common-controls which is not bundled
                if dep.name != "Microsoft.Windows.Common-Controls":
                    dep.publicKeyToken = None
        applyRedirects(manifest, redirects)
        # Manifests are rewritten directly into the cache; no strip/upx applies to them.
        manifest.writeprettyxml(cachedfile)
        return cachedfile
    if upx:
        if strip:
            # Strip first via a recursive call (upx=False), then UPX-compress the stripped result below.
            fnm = checkCache(fnm, strip=True, upx=False, dist_nm=dist_nm, target_arch=target_arch, codesign_identity=codesign_identity, entitlements_file=entitlements_file)
        # We need to avoid using UPX with Windows DLLs that have Control Flow Guard enabled, as it breaks them.
        if is_win and versioninfo.pefile_check_control_flow_guard(fnm):
            logger.info('Disabling UPX for %s due to CFG!', fnm)
        elif misc.is_file_qt_plugin(fnm):
            logger.info('Disabling UPX for %s due to it being a Qt plugin!', fnm)
        else:
            bestopt = "--best"
            # FIXME: Linux builds of UPX do not seem to contain LZMA (they assert out).
            # A better configure-time check is due.
            # NOTE(review): CONF["hasUPX"] may be the int 0 when UPX is unavailable (see test_UPX);
            # `0 >= (3,)` raises TypeError on Python 3 -- confirm `upx` is never requested without UPX present.
            if CONF["hasUPX"] >= (3, ) and os.name == "nt":
                bestopt = "--lzma"
            upx_executable = "upx"
            if CONF.get('upx_dir'):
                upx_executable = os.path.join(CONF['upx_dir'], upx_executable)
            # Deferred: the command runs after the file has been copied into the cache (see below).
            cmd = [upx_executable, bestopt, "-q", cachedfile]
    else:
        if strip:
            strip_options = []
            if is_darwin:
                # The default strip behavior breaks some shared libraries under Mac OS.
                strip_options = ["-S"]  # -S = strip only debug symbols.
            cmd = ["strip"] + strip_options + [cachedfile]
    if not os.path.exists(os.path.dirname(cachedfile)):
        os.makedirs(os.path.dirname(cachedfile))
    # There are known some issues with 'shutil.copy2' on Mac OS 10.11 with copying st_flags. Issue #1650.
    # 'shutil.copy' copies also permission bits and it should be sufficient for PyInstaller's purposes.
    shutil.copy(fnm, cachedfile)
    # TODO: find out if this is still necessary when no longer using shutil.copy2()
    if hasattr(os, 'chflags'):
        # Some libraries on FreeBSD have immunable flag (libthr.so.3, for example). If this flag is preserved,
        # os.chmod() fails with: OSError: [Errno 1] Operation not permitted.
        try:
            os.chflags(cachedfile, 0)
        except OSError:
            pass
    os.chmod(cachedfile, 0o755)
    if os.path.splitext(fnm.lower())[1] in (".pyd", ".dll"):
        # When shared assemblies are bundled into the app, they may optionally be changed into private assemblies.
        try:
            res = winmanifest.GetManifestResources(os.path.abspath(cachedfile))
        except winresource.pywintypes.error as e:
            if e.args[0] == winresource.ERROR_BAD_EXE_FORMAT:
                # Not a win32 PE file
                pass
            else:
                logger.error(os.path.abspath(cachedfile))
                raise
        else:
            if winmanifest.RT_MANIFEST in res and len(
                    res[winmanifest.RT_MANIFEST]):
                # Walk every embedded manifest resource (by name, then language) and rewrite it if needed.
                for name in res[winmanifest.RT_MANIFEST]:
                    for language in res[winmanifest.RT_MANIFEST][name]:
                        try:
                            manifest = winmanifest.Manifest()
                            manifest.filename = ":".join([
                                cachedfile,
                                str(winmanifest.RT_MANIFEST),
                                str(name),
                                str(language)
                            ])
                            manifest.parse_string(
                                res[winmanifest.RT_MANIFEST][name][language],
                                False)
                        except Exception:
                            # A broken manifest is logged but does not abort caching of the binary itself.
                            logger.error(
                                "Cannot parse manifest resource %s, =%s",
                                name, language)
                            logger.error("From file %s", cachedfile,
                                         exc_info=1)
                        else:
                            # optionally change manifest to private assembly
                            private = CONF.get('win_private_assemblies', False)
                            if private:
                                if manifest.publicKeyToken:
                                    logger.info(
                                        "Changing %s into a private assembly",
                                        os.path.basename(fnm))
                                manifest.publicKeyToken = None
                                # Change dep to private assembly
                                for dep in manifest.dependentAssemblies:
                                    # Exclude common-controls which is not bundled
                                    if dep.name != "Microsoft.Windows.Common-Controls":
                                        dep.publicKeyToken = None
                            redirecting = applyRedirects(manifest, redirects)
                            # Only write the resource back when something actually changed.
                            if redirecting or private:
                                try:
                                    manifest.update_resources(
                                        os.path.abspath(cachedfile), [name],
                                        [language])
                                except Exception:
                                    logger.error(os.path.abspath(cachedfile))
                                    raise
    if cmd:
        logger.info("Executing - " + ' '.join(cmd))
        # terminates if execution fails
        compat.exec_command(*cmd)
    # update cache index
    cache_index[basenm] = digest
    misc.save_py_data_struct(cacheindexfn, cache_index)
    # On Mac OS we need relative paths to dll dependencies starting with @executable_path. While modifying
    # the headers invalidates existing signatures, we avoid removing them in order to speed things up (and
    # to avoid potential bugs in the codesign utility, like the one reported on Mac OS 10.13 in #6167).
    # The forced re-signing at the end should take care of the invalidated signatures.
    if is_darwin:
        osxutils.binary_to_target_arch(cachedfile, target_arch, display_name=fnm)
        #osxutils.remove_signature_from_binary(cachedfile)  # Disabled as per comment above.
        dylib.mac_set_relative_dylib_deps(cachedfile, dist_nm)
        osxutils.sign_binary(cachedfile, codesign_identity, entitlements_file)
    return cachedfile
def findLibrary(name):
    """
    Emulate dlopen's search algorithm to locate a shared library on the system.

    `name` must include the prefix, e.g. ``libpython2.4.so``.
    """
    assert is_unix, ("Current implementation for Unix only (Linux, Solaris, AIX, FreeBSD)")

    # First: the loader's search-path environment variable for this platform.
    if is_aix:
        env_name = 'LIBPATH'
    elif is_darwin:
        env_name = 'DYLD_LIBRARY_PATH'
    else:
        env_name = 'LD_LIBRARY_PATH'
    lib = None
    for directory in compat.getenv(env_name, '').split(os.pathsep):
        hits = glob(os.path.join(directory, name + '*'))
        if hits:
            lib = hits[0]
            break

    # Second: the ldconfig cache (/etc/ld.so.cache).
    # TODO Look for ldconfig in /usr/sbin/ldconfig. /sbin is deprecated in recent linux distributions.
    # Solaris does not have /sbin/ldconfig. Just check if this file exists.
    if lib is None and os.path.exists('/sbin/ldconfig'):
        pattern = r'/[^\(\)\s]*%s\.[^\(\)\s]*' % re.escape(name)
        # FreeBSD's output has a slightly different format than on linux, but the regex still works.
        option = '-r' if is_freebsd else '-p'
        found = re.search(pattern, compat.exec_command('/sbin/ldconfig', option))
        if found:
            lib = found.group(0)

    # Third: the known safe paths.
    if lib is None:
        # Architecture independent locations.
        paths = ['/lib', '/usr/lib']
        # Architecture dependent locations.
        if compat.architecture() == '32bit':
            paths.extend(['/lib32', '/usr/lib32', '/usr/lib/i386-linux-gnu'])
        else:
            paths.extend(['/lib64', '/usr/lib64', '/usr/lib/x86_64-linux-gnu'])
        # On Debian/Ubuntu /usr/bin/python is linked statically with libpython. Newer Debian/Ubuntu with multiarch
        # support put the libpythonX.Y.so in paths like /usr/lib/i386-linux-gnu/.
        try:
            import sysconfig  # Module available only in Python 2.7+.
            # 'multiarchsubdir' works on Debian/Ubuntu only in Python 2.7 and 3.3+.
            arch_subdir = sysconfig.get_config_var('multiarchsubdir')
            if arch_subdir:  # Ignore if None is returned.
                paths.append(os.path.join('/usr/lib', os.path.basename(arch_subdir)))
            else:
                logger.debug('Multiarch directory not detected.')
        except ImportError:
            logger.debug('Multiarch directory not detected.')
        if is_aix:
            paths.append('/opt/freeware/lib')
        elif is_freebsd:
            paths.append('/usr/local/lib')
        for directory in paths:
            hits = glob(os.path.join(directory, name + '*'))
            if hits:
                lib = hits[0]
                break

    # give up :(
    if lib is None:
        return None

    # On FreeBSD objdump doesn't show SONAME, and on AIX objdump does not exist,
    # so we just return the lib we've found.
    if is_freebsd or is_aix:
        return lib
    # Resolve the file name into the soname.
    return os.path.join(os.path.dirname(lib), _get_so_name(lib))
def _resolveCtypesImports(cbinaries):
    """
    Complete ctypes BINARY entries for modules with their full path.

    `cbinaries` is an iterable of shared-library names referenced via ctypes.

    Returns a list of (name, fullpath, 'BINARY') tuples, one per library that could be located on disk;
    libraries that cannot be found are logged as warnings and omitted from the result.
    """
    from ctypes.util import find_library

    # Pick the dynamic loader's search-path variable for the current platform.
    if is_unix:
        envvar = "LD_LIBRARY_PATH"
    elif is_darwin:
        envvar = "DYLD_LIBRARY_PATH"
    else:
        envvar = "PATH"

    def _setPaths():
        # Prepend PyInstaller's extra search paths to the loader path so find_library() can see them.
        # Returns the previous value of the variable (or None) so it can be restored afterwards.
        path = os.pathsep.join(PyInstaller.__pathex__)
        old = compat.getenv(envvar)
        if old is not None:
            path = os.pathsep.join((path, old))
        compat.setenv(envvar, path)
        return old

    def _restorePaths(old):
        # Undo _setPaths(); `old` being None means the variable was previously unset.
        if old is None:
            compat.unsetenv(envvar)
        else:
            compat.setenv(envvar, old)

    ret = []

    # Try to locate the shared library on disk. This is done by executing ctypes.util.find_library with
    # ImportTracker's local paths prepended to the library search paths, then restoring the original values.
    old = _setPaths()
    for cbin in cbinaries:
        # Ignore annoying warnings like:
        # 'W: library kernel32.dll required via ctypes not found'
        # 'W: library coredll.dll required via ctypes not found'
        if cbin in ('coredll.dll', 'kernel32.dll'):
            continue
        ext = os.path.splitext(cbin)[1]
        # On Windows, only .dll files can be loaded.
        if os.name == "nt" and ext.lower() in (".so", ".dylib"):
            continue
        cpath = find_library(os.path.splitext(cbin)[0])
        if is_unix:
            # CAVEAT: find_library() is not the correct function. Ctype's documentation says that it is meant to
            # resolve only the filename (as a *compiler* does) not the full path. Anyway, it works well enough on
            # Windows and Mac. On Linux, we need to implement more code to find out the full path.
            if cpath is None:
                cpath = cbin
            # "man ld.so" says that we should first search LD_LIBRARY_PATH and then the ldcache.
            for d in compat.getenv(envvar, '').split(os.pathsep):
                if os.path.isfile(os.path.join(d, cpath)):
                    cpath = os.path.join(d, cpath)
                    break
            else:
                text = compat.exec_command("/sbin/ldconfig", "-p")
                for L in text.strip().splitlines():
                    if cpath in L:
                        cpath = L.split("=>", 1)[1].strip()
                        # NOTE(review): assert is stripped under `python -O`; this relies on ldconfig
                        # output pointing at a real file -- confirm that is acceptable.
                        assert os.path.isfile(cpath)
                        break
                else:
                    cpath = None
        if cpath is None:
            # Fix: Logger.warn is a deprecated alias of Logger.warning.
            logger.warning("library %s required via ctypes not found", cbin)
        else:
            ret.append((cbin, cpath, "BINARY"))
    _restorePaths(old)
    return ret
def load_ldconfig_cache():
    """
    Create a cache of the `ldconfig`-output to call it only once.
    It contains thousands of libraries and running it on every dylib is expensive.

    Populates the module-level LDCONFIG_CACHE dict mapping library soname -> full path;
    sets it to an empty dict when ldconfig is unavailable (musl, missing binary, or failure).
    """
    global LDCONFIG_CACHE

    if LDCONFIG_CACHE is not None:
        # Already populated; nothing to do.
        return

    if compat.is_musl:
        # Musl deliberately doesn't use ldconfig. The ldconfig executable either doesn't exist or it's a functionless
        # executable which, on calling with any arguments, simply tells you that those arguments are invalid.
        LDCONFIG_CACHE = {}
        return

    # Fix: distutils is deprecated (PEP 632) and removed in Python 3.12; shutil.which is the stdlib
    # replacement for distutils.spawn.find_executable. `shutil` is already imported at module level.
    ldconfig = shutil.which('ldconfig')
    if ldconfig is None:
        # If `ldconfig` is not found in $PATH, search for it in some fixed directories. Simply use a second call
        # instead of fiddling around with checks for empty env-vars and string-concat.
        ldconfig = shutil.which('ldconfig', path='/usr/sbin:/sbin:/usr/bin:/usr/sbin')

        # If we still could not find the 'ldconfig' command...
        if ldconfig is None:
            LDCONFIG_CACHE = {}
            return

    if compat.is_freebsd or compat.is_openbsd:
        # This has a quite different format than other Unixes:
        # [vagrant@freebsd-10 ~]$ ldconfig -r
        # /var/run/ld-elf.so.hints:
        #     search directories: /lib:/usr/lib:/usr/lib/compat:...
        #     0:-lgeom.5 => /lib/libgeom.so.5
        #   184:-lpython2.7.1 => /usr/local/lib/libpython2.7.so.1
        ldconfig_arg = '-r'
        splitlines_count = 2
        pattern = re.compile(r'^\s+\d+:-l(\S+)(\s.*)? => (\S+)')
    else:
        # Skip first line of the library list because it is just an informative line and might contain localized
        # characters. Example of first line with locale set to cs_CZ.UTF-8:
        # $ /sbin/ldconfig -p
        # V keši „/etc/ld.so.cache“ nalezeno knihoven: 2799
        #     libzvbi.so.0 (libc6,x86-64) => /lib64/libzvbi.so.0
        #     libzvbi-chains.so.0 (libc6,x86-64) => /lib64/libzvbi-chains.so.0
        ldconfig_arg = '-p'
        splitlines_count = 1
        pattern = re.compile(r'^\s+(\S+)(\s.*)? => (\S+)')

    try:
        text = compat.exec_command(ldconfig, ldconfig_arg)
    except ExecCommandFailed:
        logger.warning("Failed to execute ldconfig. Disabling LD cache.")
        LDCONFIG_CACHE = {}
        return

    text = text.strip().splitlines()[splitlines_count:]

    LDCONFIG_CACHE = {}
    for line in text:
        # :fixme: this assumes library names do not contain whitespace
        m = pattern.match(line)

        # Sanitize away any abnormal lines of output.
        if m is None:
            # Warn about it then skip the rest of this iteration.
            if re.search("Cache generated by:", line):
                # See #5540. This particular line is harmless.
                pass
            else:
                logger.warning("Unrecognised line of output %r from ldconfig", line)
            continue

        path = m.groups()[-1]
        if compat.is_freebsd or compat.is_openbsd:
            # Insert `.so` at the end of the lib's basename. soname and filename may have (different) trailing
            # versions. We assume the `.so` in the filename to mark the end of the lib's basename.
            bname = os.path.basename(path).split('.so', 1)[0]
            name = 'lib' + m.group(1)
            assert name.startswith(bname)
            name = bname + '.so' + name[len(bname):]
        else:
            name = m.group(1)

        # ldconfig may know about several versions of the same lib, e.g., different arch, different libc, etc.
        # Use the first entry.
        if name not in LDCONFIG_CACHE:
            LDCONFIG_CACHE[name] = path