def clean_orphaned_assets() -> None:
    """Remove asset files that are no longer part of the build."""
    import os
    import json
    import efrotools

    # Operate from dist root.
    os.chdir(PROJROOT)

    # Our manifest is split into 2 files (public and private).
    with open('assets/.asset_manifest_public.json',
              encoding='utf-8') as infile:
        manifest = set(json.loads(infile.read()))
    with open('assets/.asset_manifest_private.json',
              encoding='utf-8') as infile:
        manifest.update(set(json.loads(infile.read())))
    for root, _dirs, fnames in os.walk('assets/build'):
        for fname in fnames:
            fpath = os.path.join(root, fname)
            # Manifest paths are relative to assets/build
            # (13 chars == len('assets/build/')).
            fpathrel = fpath[13:]
            if fpathrel not in manifest:
                print(f'Removing orphaned asset file: {fpath}')
                os.unlink(fpath)

    # Lastly, clear out any empty dirs.
    efrotools.run('find assets/build -depth -empty -type d -delete')
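# Aside: a minimal sketch (illustrative only, not part of the tooling
# above) of the same orphan-pruning using os.path.relpath in place of
# the hard-coded 13-character 'assets/build/' prefix slice:
def _clean_orphaned_assets_relpath_sketch(manifest: 'set[str]') -> None:
    import os
    for root, _dirs, fnames in os.walk('assets/build'):
        for fname in fnames:
            fpath = os.path.join(root, fname)
            # relpath yields the same manifest-relative path without
            # depending on the prefix length.
            if os.path.relpath(fpath, 'assets/build') not in manifest:
                os.unlink(fpath)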
def update_docs_md(check: bool) -> None:
    """Updates docs markdown files if necessary."""
    # pylint: disable=too-many-locals
    from efrotools import get_files_hash, run

    docs_path = 'docs/ba_module.md'

    # We store the hash in a separate file that only exists on private
    # so public isn't full of constant hash change commits.
    # (don't care so much on private)
    docs_hash_path = '.cache/ba_module_hash'

    # Generate a hash from all c/c++ sources under the python subdir
    # as well as all python scripts.
    pysources = []
    exts = ['.cc', '.c', '.h', '.py']
    for basedir in [
            'src/ballistica/python',
            'tools/efro',
            'tools/bacommon',
            'assets/src/ba_data/python/ba',
    ]:
        assert os.path.isdir(basedir), f'{basedir} is not a dir.'
        for root, _dirs, files in os.walk(basedir):
            for fname in files:
                if any(fname.endswith(ext) for ext in exts):
                    pysources.append(os.path.join(root, fname))
    pysources.sort()
    storedhash: Optional[str]
    curhash = get_files_hash(pysources)

    # Extract the current embedded hash.
    if os.path.exists(docs_hash_path):
        with open(docs_hash_path, encoding='utf-8') as infile:
            storedhash = infile.read()
    else:
        storedhash = None
    if (storedhash is None or curhash != storedhash
            or not os.path.exists(docs_path)):
        if check:
            raise RuntimeError('Docs markdown is out of date.')
        print(f'Updating {docs_path}...', flush=True)
        run('make docs')

        # Our docs markdown is just the docs html with a few added
        # bits at the top.
        with open('build/docs.html', encoding='utf-8') as infile:
            docs = infile.read()
        docs = ('<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->\n'
                + docs)
        os.makedirs(os.path.dirname(docs_path), exist_ok=True)
        with open(docs_path, 'w', encoding='utf-8') as outfile:
            outfile.write(docs)
        with open(docs_hash_path, 'w', encoding='utf-8') as outfile:
            outfile.write(curhash)
    print(f'{docs_path} is up to date.')
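# Usage note: the 'check' arg above turns update_docs_md() into a pure
# verification pass; how it is wired up is not shown here, but a
# CI-style job could plausibly call update_docs_md(check=True) to fail
# fast on stale docs while regular builds pass check=False to
# regenerate them.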
def winprune() -> None:
    """Prune unneeded files from windows python dists."""
    for libdir in ('assets/src/windows/Win32/Lib',
                   'assets/src/windows/x64/Lib'):
        assert os.path.isdir(libdir)
        run('cd "' + libdir + '" && rm -rf ' + ' '.join(PRUNE_LIB_NAMES))
    for dlldir in ('assets/src/windows/Win32/DLLs',
                   'assets/src/windows/x64/DLLs'):
        assert os.path.isdir(dlldir)
        run('cd "' + dlldir + '" && rm -rf ' + ' '.join(PRUNE_DLL_NAMES))
    print('Win-prune successful.')
def warm_start_cache() -> None:
    """Run a pre-pass on the efrocache to improve efficiency."""
    import json
    from efrotools import run

    # We maintain a starter-cache on the staging server, which is
    # simply the latest set of cache entries compressed into a single
    # compressed archive. If we have no local cache yet we can download
    # and expand this to give us a nice head start and greatly reduce
    # the initial set of individual files we have to fetch.
    # (downloading a single compressed archive is much more efficient
    # than downloading thousands)
    if not os.path.exists(CACHE_DIR_NAME):
        print('Downloading asset starter-cache...', flush=True)
        run(f'curl {BASE_URL}startercache.tar.xz > startercache.tar.xz')
        print('Decompressing starter-cache...', flush=True)
        run('tar -xf startercache.tar.xz')
        run(f'mv efrocache {CACHE_DIR_NAME}')
        run('rm startercache.tar.xz')
        print('Starter-cache fetched successfully!'
              ' (should speed up asset builds)')

    # In the public build, let's scan through all files managed by
    # efrocache and update any with timestamps older than the latest
    # cache-map that we already have the data for. Otherwise those
    # files will update individually the next time they are 'built'.
    # Even though that only takes a fraction of a second per file, it
    # adds up when done for thousands of assets each time the cache map
    # changes. It is much more efficient to do it in one go here.
    cachemap: Dict[str, str]
    with open(CACHE_MAP_NAME) as infile:
        cachemap = json.loads(infile.read())
    assert isinstance(cachemap, dict)
    cachemap_mtime = os.path.getmtime(CACHE_MAP_NAME)
    entries: List[Tuple[str, str]] = []
    for fname, url in cachemap.items():

        # File hasn't been pulled from cache yet = ignore.
        if not os.path.exists(fname):
            continue

        # File is newer than the cache map = ignore.
        if cachemap_mtime < os.path.getmtime(fname):
            continue

        # Don't have the cache source file for this guy = ignore.
        cachefile = CACHE_DIR_NAME + '/' + '/'.join(url.split('/')[-3:])
        if not os.path.exists(cachefile):
            continue

        # Ok, add it to the list of files we can potentially update
        # timestamps on once we check its hash.
        filehash = ''.join(url.split('/')[-3:])
        entries.append((fname, filehash))
    if entries:
        # Now fire off a multithreaded executor to check hashes and
        # update timestamps.
        _check_warm_start_entries(entries)
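# _check_warm_start_entries() is defined elsewhere; purely as a sketch
# (an assumption about its behavior, not the actual implementation),
# such a helper might hash entries on a thread pool and freshen the
# timestamps of files that still match the cache map:
def _check_warm_start_entries_sketch(
        entries: 'List[Tuple[str, str]]') -> None:
    from concurrent.futures import ThreadPoolExecutor

    def _check(entry: 'Tuple[str, str]') -> None:
        fname, filehash = entry
        # Assumes get_file_hash() mirrors the digest used to name
        # cache entries (see get_target()).
        if get_file_hash(fname) == filehash:
            os.utime(fname, None)  # Still valid; just freshen mtime.

    with ThreadPoolExecutor() as executor:
        # Drain the iterator so any worker exceptions propagate here.
        list(executor.map(_check, entries))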
def resize_image() -> None:
    """Resize an image and save it to a new location.

    args: xres, yres, src, dst
    """
    import os
    import efrotools
    if len(sys.argv) != 6:
        raise Exception('expected 5 args')
    width = int(sys.argv[2])
    height = int(sys.argv[3])
    src = sys.argv[4]
    dst = sys.argv[5]
    if not dst.endswith('.png'):
        raise RuntimeError(f'dst must be a png; got "{dst}"')
    if not src.endswith('.png'):
        raise RuntimeError(f'src must be a png; got "{src}"')
    print('Creating: ' + os.path.basename(dst), file=sys.stderr)
    efrotools.run(f'convert "{src}" -resize {width}x{height} "{dst}"')
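# A rough equivalent using Pillow instead of shelling out to
# ImageMagick's 'convert' (an alternative sketch only; Pillow is not
# otherwise assumed by this tooling):
def _resize_image_pillow(width: int, height: int, src: str,
                         dst: str) -> None:
    from PIL import Image
    with Image.open(src) as img:
        img.resize((width, height), Image.LANCZOS).save(dst)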
def androidaddr(archive_dir: str, arch: str, addr: str) -> None:
    """Print the source file location for an android program-counter.

    command line args: archive_dir architecture addr
    """
    if not os.path.isdir(archive_dir):
        print('ERROR: invalid archive dir: "' + archive_dir + '"')
        sys.exit(255)
    archs = {
        'x86': {
            'prefix': 'x86-',
            'libmain': 'libmain_x86.so'
        },
        'arm': {
            'prefix': 'arm-',
            'libmain': 'libmain_arm.so'
        },
        'arm64': {
            'prefix': 'aarch64-',
            'libmain': 'libmain_arm64.so'
        },
        'x86-64': {
            'prefix': 'x86_64-',
            'libmain': 'libmain_x86-64.so'
        }
    }
    if arch not in archs:
        print('ERROR: invalid arch "' + arch + '"; (choices are ' +
              ', '.join(archs.keys()) + ')')
        sys.exit(255)
    sdkutils = 'tools/android_sdk_utils'
    rootdir = '.'
    ndkpath = subprocess.check_output([sdkutils,
                                       'get-ndk-path']).decode().strip()
    if not os.path.isdir(ndkpath):
        print('ERROR: ndk-path "' + ndkpath + '" does not exist')
        sys.exit(255)
    lines = subprocess.check_output(
        ['find',
         os.path.join(ndkpath, 'toolchains'), '-name',
         '*addr2line']).decode().strip().splitlines()
    lines = [line for line in lines if archs[arch]['prefix'] in line]
    if len(lines) != 1:
        print("ERROR: couldn't find addr2line binary")
        sys.exit(255)
    addr2line = lines[0]
    efrotools.run('mkdir -p "' + os.path.join(rootdir, 'android_addr_tmp') +
                  '"')
    try:
        efrotools.run('cd "' + os.path.join(rootdir, 'android_addr_tmp') +
                      '" && tar -xf "' +
                      os.path.join(archive_dir, 'unstripped_libs',
                                   archs[arch]['libmain'] + '.tgz') + '"')
        efrotools.run(addr2line + ' -e "' +
                      os.path.join(rootdir, 'android_addr_tmp',
                                   archs[arch]['libmain']) + '" ' + addr)
    finally:
        os.system('rm -rf "' + os.path.join(rootdir, 'android_addr_tmp') +
                  '"')
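# Example (all values illustrative): resolving a crash address against
# the unstripped arm64 lib from a build archive:
#   androidaddr('path/to/archive_dir', 'arm64', '0x00123abc')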
def get_target(path: str) -> None:
    """Fetch a target path from the cache, downloading if need be."""
    import json
    from efrotools import run
    with open(CACHE_MAP_NAME) as infile:
        efrocachemap = json.loads(infile.read())
    if path not in efrocachemap:
        raise RuntimeError(f'Path not found in efrocache: {path}')
    url = efrocachemap[path]
    subpath = '/'.join(url.split('/')[-3:])
    local_cache_path = os.path.join(CACHE_DIR_NAME, subpath)
    local_cache_path_dl = local_cache_path + '.download'
    hashval = ''.join(subpath.split('/'))

    # First off: if there's already a file in place, check its hash.
    # If it matches the cache, we can just update its timestamp and
    # call it a day.
    if os.path.isfile(path):
        existing_hash = get_file_hash(path)
        if existing_hash == hashval:
            os.utime(path, None)
            print(f'Refreshing from cache: {path}')
            return

    # Ok there's not a valid file in place already.
    # Clear out whatever is there to start with.
    if os.path.exists(path):
        os.unlink(path)

    # Now if we don't have this entry in our local cache, download it.
    if not os.path.exists(local_cache_path):
        os.makedirs(os.path.dirname(local_cache_path), exist_ok=True)
        print(f'Downloading: {CLRBLU}{path}{CLREND}')
        run(f'curl --silent {url} > {local_cache_path_dl}')
        run(f'mv {local_cache_path_dl} {local_cache_path}')

    # Ok we should have a valid .tar.gz file in our cache dir at this
    # point. Just expand it and it gets placed wherever it belongs.
    print(f'Extracting: {path}')
    run(f'tar -zxf {local_cache_path}')

    # The file will wind up with the timestamp it was compressed with,
    # so let's update its timestamp or else it will still be considered
    # dirty.
    run(f'touch {path}')
    if not os.path.exists(path):
        raise RuntimeError(f'File {path} did not wind up as expected.')
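# get_file_hash() is defined elsewhere; based on how hashval is derived
# above (the slash-stripped cache subpath), it presumably mirrors the
# digest used when cache entries are written (md5 of contents plus
# path; see _write_cache_file below). A minimal sketch of that
# assumption:
def _get_file_hash_sketch(fname: str) -> str:
    import hashlib
    md5 = hashlib.md5()
    with open(fname, 'rb') as infile:
        md5.update(infile.read())
    md5.update(fname.encode())
    return md5.hexdigest()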
def _write_cache_file(staging_dir: str, fname: str) -> Tuple[str, str]:
    """Write a single cache file; returns (fname, hashpath)."""
    import hashlib
    from efrotools import run
    print(f'Caching {fname}')
    if ' ' in fname:
        raise RuntimeError('Spaces in paths not supported.')

    # Just going with ol' md5 here; we're the only ones creating these
    # so security isn't a concern.
    md5 = hashlib.md5()
    with open(fname, 'rb') as infile:
        md5.update(infile.read())
    md5.update(fname.encode())
    finalhash = md5.hexdigest()
    hashpath = os.path.join(finalhash[:2], finalhash[2:4], finalhash[4:])
    path = os.path.join(staging_dir, hashpath)
    os.makedirs(os.path.dirname(path), exist_ok=True)

    # Fancy pipe stuff which will give us deterministic tar.gz files
    # (no embedded timestamps).
    # Note: the 'COPYFILE_DISABLE' prevents mac tar from adding file
    # attributes/resource-forks to the archive as ._filename.
    run(f'COPYFILE_DISABLE=1 tar cf - {fname} | gzip -n > {path}')
    return fname, hashpath
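# For reference: the two-level fan-out above maps a digest such as
# 'd41d8cd98f00b204e9800998ecf8427e' to the staging path
# 'd4/1d/8cd98f00b204e9800998ecf8427e', keeping any single cache
# directory from accumulating thousands of entries.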
def _upload_cache(fnames1: List[str], fnames2: List[str], hashes_str: str,
                  hashes_existing_str: str) -> None:
    """Push updated efrocache files to the staging server."""
    from efrotools import run

    # First, if we've run before, print the files causing us to re-run:
    if hashes_existing_str != '':
        changed_files: Set[str] = set()
        hashes = json.loads(hashes_str)
        hashes_existing = json.loads(hashes_existing_str)
        for fname, ftime in hashes.items():
            if ftime != hashes_existing.get(fname, ''):
                changed_files.add(fname)

        # We've covered modifications and additions; add deletions:
        for fname in hashes_existing:
            if fname not in hashes:
                changed_files.add(fname)
        print(f'{Clr.SBLU}Updating efrocache due to'
              f' {len(changed_files)} changes:{Clr.RST}')
        for fname in sorted(changed_files):
            print(f' {Clr.SBLU}{fname}{Clr.RST}')

    # Now do the thing.
    staging_dir = 'build/efrocache'
    mapping_file = 'build/efrocachemap'
    run(f'rm -rf {staging_dir}')
    run(f'mkdir -p {staging_dir}')
    _write_cache_files(fnames1, fnames2, staging_dir, mapping_file)
    print(f'{Clr.SBLU}Starter cache includes {len(fnames1)} items;'
          f' excludes {len(fnames2)}{Clr.RST}')

    # Sync all individual cache files to the staging server.
    print(f'{Clr.SBLU}Pushing cache to staging...{Clr.RST}', flush=True)
    run('rsync --progress --recursive --human-readable build/efrocache/'
        ' [email protected]:files.ballistica.net/cache/ba1/')

    # Now generate the starter cache on the server..
    run('ssh -oBatchMode=yes -oStrictHostKeyChecking=yes [email protected]'
        ' "cd files.ballistica.net/cache/ba1 && python3 genstartercache.py"')
def build_apple(arch: str, debug: bool = False) -> None:
    """Run a build for the provided apple arch (mac, ios, or tvos)."""
    builddir = 'build/python_apple_' + arch + ('_debug' if debug else '')
    efrotools.run('rm -rf "' + builddir + '"')
    efrotools.run('mkdir -p build')
    efrotools.run('git clone '
                  '[email protected]:pybee/Python-Apple-support.git "' +
                  builddir + '"')
    os.chdir(builddir)

    # TEMP: Check out a particular commit while the branch head is
    # broken.
    # efrotools.run('git checkout 1a9c71dca298c03517e8236b81cf1d9c8c521cbf')
    efrotools.run(f'git checkout {PYTHON_VERSION_MAJOR}')

    # On mac we currently have to add the _scproxy module or urllib
    # will fail.
    txt = efrotools.readfile('patch/Python/Setup.embedded')
    if arch == 'mac':
        txt += ('\n'
                '# ericf added - mac urllib needs this\n'
                '_scproxy _scproxy.c '
                '-framework SystemConfiguration '
                '-framework CoreFoundation')

    # Turn off sqlite module. (scratch that; leaving it in.)
    # txt = efrotools.replace_one(txt, '_sqlite3 -I$(', '#_sqlite3 -I$(')
    # txt = txt.replace(' _sqlite/', '# _sqlite/')

    # Turn off xz compression module. (scratch that; leaving it in.)
    # txt = efrotools.replace_one(txt, '_lzma _', '#_lzma _')

    # Turn off bzip2 module.
    txt = efrotools.replace_one(txt, '_bz2 _b', '#_bz2 _b')

    # Turn off openssl module (only if not doing openssl).
    if not ENABLE_OPENSSL:
        txt = efrotools.replace_one(txt, '_hashlib _hashopenssl.c',
                                    '#_hashlib _hashopenssl.c')

    # Turn off various other stuff we don't use.
    for line in [
            '_codecs _codecsmodule.c',
            '_codecs_cn cjkcodecs/_codecs_cn.c',
            '_codecs_hk cjkcodecs/_codecs_hk.c',
            '_codecs_iso2022 cjkcodecs/',
            '_codecs_jp cjkcodecs/_codecs_jp.c',
            '_codecs_kr cjkcodecs/_codecs_kr.c',
            '_codecs_tw cjkcodecs/_codecs_tw.c',
            '_lsprof _lsprof.o rotatingtree.c',
            '_multibytecodec cjkcodecs/multibytecodec.c',
            '_multiprocessing _multiprocessing/multiprocessing.c',
            '_opcode _opcode.c',
            'audioop audioop.c',
            'grp grpmodule.c',
            'mmap mmapmodule.c',
            'parser parsermodule.c',
            'pyexpat expat/xmlparse.c',
            ' expat/xmlrole.c ',
            ' expat/xmltok.c ',
            ' pyexpat.c ',
            ' -I$(srcdir)/Modules/expat ',
            ' -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI'
            ' -DXML_DEV_URANDOM',
            'resource resource.c',
            'syslog syslogmodule.c',
            'termios termios.c',
            '_ctypes_test _ctypes/_ctypes_test.c',
            '_testbuffer _testbuffer.c',
            '_testimportmultiple _testimportmultiple.c',
            '_crypt _cryptmodule.c',  # not on android so disabling here too
    ]:
        txt = efrotools.replace_one(txt, line, '#' + line)
    if ENABLE_OPENSSL:

        # _md5 and _sha modules are normally only built if the system
        # does not have the OpenSSL libs containing an optimized
        # version.
        # Note: seems we still need sha3 or we get errors.
        for line in [
                '_md5 md5module.c',
                '_sha1 sha1module.c',
                # '_sha3 _sha3/sha3module.c',
                '_sha256 sha256module.c',
                '_sha512 sha512module.c',
        ]:
            txt = efrotools.replace_one(txt, line, '#' + line)
    else:
        txt = efrotools.replace_one(txt, '_ssl _ssl.c', '#_ssl _ssl.c')
    efrotools.writefile('patch/Python/Setup.embedded', txt)

    txt = efrotools.readfile('Makefile')

    # Fix a bug where spaces in PATH cause errors (darn you vmware
    # fusion!)
    txt = efrotools.replace_one(
        txt, '&& PATH=$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH) .',
        '&& PATH="$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH)" .')

    # Remove makefile dependencies so we don't build the libs we're
    # not using.
    srctxt = '$$(PYTHON_DIR-$1)/dist/lib/libpython$(PYTHON_VER)m.a: '
    txt = efrotools.replace_one(
        txt, srctxt,
        '$$(PYTHON_DIR-$1)/dist/lib/libpython$(PYTHON_VER)m.a: ' +
        ('build/$2/Support/OpenSSL ' if ENABLE_OPENSSL else '') +
        'build/$2/Support/XZ $$(PYTHON_DIR-$1)/Makefile\n#' + srctxt)
    srctxt = ('dist/Python-$(PYTHON_VER)-$1-support.'
              'b$(BUILD_NUMBER).tar.gz: ')
    txt = efrotools.replace_one(
        txt, srctxt,
        'dist/Python-$(PYTHON_VER)-$1-support.b$(BUILD_NUMBER).tar.gz:'
        ' $$(PYTHON_FRAMEWORK-$1)\n#' + srctxt)

    # Turn doc strings on; looks like it only adds a few hundred k.
    txt = txt.replace('--without-doc-strings', '--with-doc-strings')

    # We're currently aiming at 10.13+ on mac
    # (see issue with utimensat and futimens).
    txt = efrotools.replace_one(txt, 'MACOSX_DEPLOYMENT_TARGET=10.8',
                                'MACOSX_DEPLOYMENT_TARGET=10.13')

    # And equivalent iOS (11+).
    txt = efrotools.replace_one(txt, 'CFLAGS-iOS=-mios-version-min=8.0',
                                'CFLAGS-iOS=-mios-version-min=11.0')

    # Ditto for tvOS.
    txt = efrotools.replace_one(txt, 'CFLAGS-tvOS=-mtvos-version-min=9.0',
                                'CFLAGS-tvOS=-mtvos-version-min=11.0')
    if debug:

        # Add debug build flag.
        # (Currently expect to find 2 instances of this.)
        dline = '--with-doc-strings --enable-ipv6 --without-ensurepip'
        splitlen = len(txt.split(dline))
        if splitlen != 3:
            raise Exception('unexpected configure lines')
        txt = txt.replace(dline, '--with-pydebug ' + dline)

        # Debug has a different name.
        # (Currently expect to replace 13 instances of this.)
        dline = 'python$(PYTHON_VER)m'
        splitlen = len(txt.split(dline))
        if splitlen != 14:
            raise Exception('unexpected configure lines')
        txt = txt.replace(dline, 'python$(PYTHON_VER)dm')
    efrotools.writefile('Makefile', txt)

    # Ok; let 'er rip.
    # (we run these in parallel so limit to 1 job a piece;
    # otherwise they inherit the -j12 or whatever from the top level)
    # (also this build seems to fail with multiple threads)
    efrotools.run('make -j1 ' + {
        'mac': 'Python-macOS',
        'ios': 'Python-iOS',
        'tvos': 'Python-tvOS'
    }[arch])
    print('python build complete! (apple/' + arch + ')')
def build_android(rootdir: str, arch: str, debug: bool = False) -> None:
    """Run a build for android with the given architecture.

    (can be arm, arm64, x86, or x86_64)
    """
    import subprocess
    builddir = 'build/python_android_' + arch + ('_debug' if debug else '')
    efrotools.run('rm -rf "' + builddir + '"')
    efrotools.run('mkdir -p build')
    efrotools.run('git clone '
                  '[email protected]:yan12125/python3-android.git "' +
                  builddir + '"')
    os.chdir(builddir)

    # It seems we now need 'autopoint' as part of this build, but on
    # mac it is not available on the normal path, but only as part of
    # the keg-only gettext homebrew formula.
    if (subprocess.run('which autopoint', shell=True,
                       check=False).returncode != 0):
        print('Updating path for mac autopoint...')
        appath = subprocess.run('brew ls gettext | grep bin/autopoint',
                                shell=True,
                                check=True,
                                capture_output=True)
        appathout = os.path.dirname(appath.stdout.decode().strip())
        os.environ['PATH'] += (':' + appathout)
        print(f'ADDED "{appathout}" TO SYS PATH...')

    # Commit from Jan 8, 2020. Right after this, the build system was
    # switched to a completely new minimal one which will take some
    # work to update here. Punting on that for now...
    if True:  # pylint: disable=using-constant-test
        efrotools.run('git checkout 9adbcfaca37f40b7a86381f83f0f6af4187233ae')
    ftxt = efrotools.readfile('pybuild/env.py')

    # Set the packages we build.
    ftxt = efrotools.replace_one(
        ftxt, 'packages = (', "packages = ('zlib', 'sqlite', 'xz'," +
        (" 'openssl'" if ENABLE_OPENSSL else "") + ")\n# packages = (")

    # Don't wanna bother with gpg signing stuff.
    ftxt = efrotools.replace_one(ftxt, 'verify_source = True',
                                 'verify_source = False')

    # Sub in the min api level we're targeting.
    ftxt = efrotools.replace_one(ftxt, 'android_api_level = 21',
                                 'android_api_level = 21')
    ftxt = efrotools.replace_one(ftxt, "target_arch = 'arm'",
                                 "target_arch = '" + arch + "'")
    efrotools.writefile('pybuild/env.py', ftxt)
    ftxt = efrotools.readfile('Makefile')

    # This needs to be python3 for us.
    ftxt = efrotools.replace_one(ftxt, 'PYTHON?=python\n',
                                 'PYTHON?=python3\n')
    efrotools.writefile('Makefile', ftxt)
    ftxt = efrotools.readfile('pybuild/packages/python.py')

    # We currently build as a static lib.
    ftxt = efrotools.replace_one(ftxt, " '--enable-shared',\n", "")
    ftxt = efrotools.replace_one(
        ftxt, "super().__init__('https://github.com/python/cpython/')",
        "super().__init__('https://github.com/python/cpython/',"
        " branch='3.7')")

    # Turn ipv6 on (curious why it's turned off here?...)
    # Also, turn on low level debugging features for our debug builds.
    ftxt = efrotools.replace_one(ftxt, "'--disable-ipv6',",
                                 "'--enable-ipv6',")
    if debug:
        ftxt = efrotools.replace_one(ftxt, "'./configure',",
                                     "'./configure', '--with-pydebug',")

    # We don't use this stuff so let's strip it out to simplify.
    ftxt = efrotools.replace_one(ftxt, "'--without-ensurepip',", "")

    # This builds all modules as dynamic libs, but we want to be
    # consistent with our other embedded builds and just static-build
    # the ones we need... so to change that we'll need to add a hook
    # for ourself after python is downloaded but before it is built so
    # we can muck with it.
    ftxt = efrotools.replace_one(
        ftxt, '    def build(self):', '    def build(self):\n'
        '        import os\n'
        '        if os.system(\'"' + rootdir +
        '/tools/snippets" python_android_patch "' + os.getcwd() +
        '"\') != 0: raise Exception("patch apply failed")')
    efrotools.writefile('pybuild/packages/python.py', ftxt)

    # Set this to a particular cpython commit to target exact releases
    # from git.
    commit = '43364a7ae01fbe4288ef42622259a0038ce1edcc'  # 3.7.6 release
    if commit is not None:
        ftxt = efrotools.readfile('pybuild/source.py')

        # Check out a particular commit right after the clone.
        ftxt = efrotools.replace_one(
            ftxt, "'git', 'clone', '--single-branch', '-b',"
            " self.branch, self.source_url, self.dest])",
            "'git', 'clone', '-b',"
            " self.branch, self.source_url, self.dest])\n"
            "        # efro: hack to get the python we want.\n"
            "        print('DOING URL', self.source_url)\n"
            "        if self.source_url == "
            "'https://github.com/python/cpython/':\n"
            "            run_in_dir(['git', 'checkout', '" + commit +
            "'], self.source_dir)")
        efrotools.writefile('pybuild/source.py', ftxt)
    ftxt = efrotools.readfile('pybuild/util.py')

    # Still don't wanna bother with gpg signing stuff.
    ftxt = efrotools.replace_one(
        ftxt, 'def gpg_verify_file(sig_filename, filename, validpgpkeys):\n',
        'def gpg_verify_file(sig_filename, filename, validpgpkeys):\n'
        '    print("gpg-verify disabled by ericf")\n'
        '    return\n')
    efrotools.writefile('pybuild/util.py', ftxt)

    # These builds require ANDROID_NDK to be set, so make sure that's
    # the case.
    os.environ['ANDROID_NDK'] = subprocess.check_output(
        [rootdir + '/tools/android_sdk_utils',
         'get-ndk-path']).decode().strip()

    # Ok, let 'er rip.
    # (we often run these builds in parallel so limit to 1 job a piece;
    # otherwise they each inherit the -j12 or whatever from the top
    # level).
    efrotools.run('make -j1')
    print('python build complete! (android/' + arch + ')')
def build_apple(arch: str, debug: bool = False) -> None:
    """Run a build for the provided apple arch (mac, ios, or tvos)."""
    import platform
    import subprocess
    from efro.error import CleanError

    # IMPORTANT; seems we currently wind up building against /usr/local
    # gettext stuff. Hopefully the maintainer fixes this, but for now I
    # need to remind myself to blow it away while building.
    # (via brew remove gettext --ignore-dependencies)
    if ('MacBook-Fro' in platform.node()
            and os.environ.get('SKIP_GETTEXT_WARNING') != '1'):
        if (subprocess.run('which gettext', shell=True,
                           check=False).returncode == 0):
            raise CleanError(
                'NEED TO TEMP-KILL GETTEXT (or set SKIP_GETTEXT_WARNING=1)')
    builddir = 'build/python_apple_' + arch + ('_debug' if debug else '')
    run('rm -rf "' + builddir + '"')
    run('mkdir -p build')
    run('git clone '
        'https://github.com/beeware/Python-Apple-support.git "' + builddir +
        '"')
    os.chdir(builddir)

    # TEMP: Check out a particular commit while the branch head is
    # broken. We can actually fix this to use the current one, but
    # something broke in the underlying build even on old commits so
    # keeping it locked for now...
    # run('git checkout bf1ed73d0d5ff46862ba69dd5eb2ffaeff6f19b6')
    run(f'git checkout {PYVER}')
    txt = readfile('Makefile')

    # Fix a bug where spaces in PATH cause errors (darn you vmware
    # fusion!)
    txt = replace_one(
        txt, '&& PATH=$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH) .',
        '&& PATH="$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH)" .')

    # Turn doc strings on; looks like it only adds a few hundred k.
    txt = txt.replace('--without-doc-strings', '--with-doc-strings')

    # Set mac/ios version reqs
    # (see issue with utimensat and futimens).
    txt = replace_one(txt, 'MACOSX_DEPLOYMENT_TARGET=10.8',
                      'MACOSX_DEPLOYMENT_TARGET=10.15')

    # And equivalent iOS.
    txt = replace_one(txt, 'CFLAGS-iOS=-mios-version-min=8.0',
                      'CFLAGS-iOS=-mios-version-min=13.0')

    # Ditto for tvOS.
    txt = replace_one(txt, 'CFLAGS-tvOS=-mtvos-version-min=9.0',
                      'CFLAGS-tvOS=-mtvos-version-min=13.0')
    if debug:

        # Add debug build flag.
        # (Currently expect to find 2 instances of this.)
        dline = '--with-doc-strings --enable-ipv6 --without-ensurepip'
        splitlen = len(txt.split(dline))
        if splitlen != 3:
            raise Exception('unexpected configure lines')
        txt = txt.replace(dline, '--with-pydebug ' + dline)

        # Debug has a different name.
        # (Currently expect to replace 12 instances of this.)
        dline = ('python$(PYTHON_VER)'
                 if NEWER_PY_TEST else 'python$(PYTHON_VER)m')
        splitlen = len(txt.split(dline))
        if splitlen != 13:
            raise RuntimeError(f'Unexpected configure line count {splitlen}.')
        txt = txt.replace(
            dline, 'python$(PYTHON_VER)d'
            if NEWER_PY_TEST else 'python$(PYTHON_VER)dm')

    # Inject our custom modifications to fire before building.
    txt = txt.replace(
        ' # Configure target Python\n',
        ' cd $$(PYTHON_DIR-$1) && '
        f'../../../../../tools/pcommand python_apple_patch {arch}\n'
        ' # Configure target Python\n',
    )
    writefile('Makefile', txt)

    # Ok; let 'er rip.
    # (we run these in parallel so limit to 1 job a piece;
    # otherwise they inherit the -j12 or whatever from the top level)
    # (also this build seems to fail with multiple threads)
    run(
        'make -j1 ' + {
            'mac': 'Python-macOS',
            # 'mac': 'build/macOS/Python-3.9.6-macOS/Makefile',
            'ios': 'Python-iOS',
            'tvos': 'Python-tvOS'
        }[arch])
    print('python build complete! (apple/' + arch + ')')
def build_apple(arch: str, debug: bool = False) -> None:
    """Run a build for the provided apple arch (mac, ios, or tvos)."""
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements
    import platform
    import subprocess
    from efro.error import CleanError

    # IMPORTANT; seems we currently wind up building against /usr/local
    # gettext stuff. Hopefully the maintainer fixes this, but for now I
    # need to remind myself to blow it away while building.
    # (via brew remove gettext --ignore-dependencies)
    if 'MacBook-Fro' in platform.node():
        if (subprocess.run('which gettext', shell=True,
                           check=False).returncode == 0):
            raise CleanError('NEED TO TEMP-KILL GETTEXT')
    builddir = 'build/python_apple_' + arch + ('_debug' if debug else '')
    run('rm -rf "' + builddir + '"')
    run('mkdir -p build')
    run('git clone '
        '[email protected]:pybee/Python-Apple-support.git "' + builddir + '"')
    os.chdir(builddir)

    # TEMP: Check out a particular commit while the branch head is
    # broken. We can actually fix this to use the current one, but
    # something broke in the underlying build even on old commits so
    # keeping it locked for now...
    # run('git checkout bf1ed73d0d5ff46862ba69dd5eb2ffaeff6f19b6')
    run(f'git checkout {PYVER}')

    # On mac we currently have to add the _scproxy module or urllib
    # will fail.
    txt = readfile('patch/Python/Setup.embedded')
    if arch == 'mac':
        txt += ('\n'
                '# ericf added - mac urllib needs this\n'
                '_scproxy _scproxy.c '
                '-framework SystemConfiguration '
                '-framework CoreFoundation')

    # Turn off sqlite module. (scratch that; leaving it in.)
    # txt = replace_one(txt, '_sqlite3 -I$(', '#_sqlite3 -I$(')
    # txt = txt.replace(' _sqlite/', '# _sqlite/')

    # Turn off xz compression module. (scratch that; leaving it in.)
    # txt = replace_one(txt, '_lzma _', '#_lzma _')

    # Turn off bzip2 module.
    txt = replace_one(txt, '_bz2 _b', '#_bz2 _b')

    # Turn off openssl module (only if not doing openssl).
    if not ENABLE_OPENSSL:
        txt = replace_one(txt, '_hashlib _hashopenssl.c',
                          '#_hashlib _hashopenssl.c')

    # Turn off various other stuff we don't use.
    for line in [
            '_codecs _codecsmodule.c',
            '_codecs_cn cjkcodecs/_codecs_cn.c',
            '_codecs_hk cjkcodecs/_codecs_hk.c',
            '_codecs_iso2022 cjkcodecs/',
            '_codecs_jp cjkcodecs/_codecs_jp.c',
            '_codecs_kr cjkcodecs/_codecs_kr.c',
            '_codecs_tw cjkcodecs/_codecs_tw.c',
            '_lsprof _lsprof.o rotatingtree.c',
            '_multibytecodec cjkcodecs/multibytecodec.c',
            '_multiprocessing _multiprocessing/multiprocessing.c',
            '_opcode _opcode.c',
            'audioop audioop.c',
            'grp grpmodule.c',
            'mmap mmapmodule.c',
            'parser parsermodule.c',
            'pyexpat expat/xmlparse.c',
            ' expat/xmlrole.c ',
            ' expat/xmltok.c ',
            ' pyexpat.c ',
            ' -I$(srcdir)/Modules/expat ',
            ' -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI'
            ' -DXML_DEV_URANDOM',
            'resource resource.c',
            'syslog syslogmodule.c',
            'termios termios.c',
            '_ctypes_test _ctypes/_ctypes_test.c',
            '_testbuffer _testbuffer.c',
            '_testimportmultiple _testimportmultiple.c',
            '_crypt _cryptmodule.c',  # not on android so disabling here too
    ]:
        txt = replace_one(txt, line, '#' + line)
    if ENABLE_OPENSSL:

        # _md5 and _sha modules are normally only built if the system
        # does not have the OpenSSL libs containing an optimized
        # version.
        # Note: seems we still need sha3 or we get errors.
        for line in [
                '_md5 md5module.c',
                '_sha1 sha1module.c',
                # '_sha3 _sha3/sha3module.c',
                '_sha256 sha256module.c',
                '_sha512 sha512module.c',
        ]:
            txt = replace_one(txt, line, '#' + line)
    else:
        txt = replace_one(txt, '_ssl _ssl.c', '#_ssl _ssl.c')
    writefile('patch/Python/Setup.embedded', txt)

    txt = readfile('Makefile')

    # Fix a bug where spaces in PATH cause errors (darn you vmware
    # fusion!)
    txt = replace_one(
        txt, '&& PATH=$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH) .',
        '&& PATH="$(PROJECT_DIR)/$(PYTHON_DIR-macOS)/dist/bin:$(PATH)" .')

    # Remove makefile dependencies so we don't build the libs we're
    # not using.
    srctxt = '$$(PYTHON_DIR-$1)/dist/lib/libpython$(PYTHON_VER).a: '
    if PY38:
        txt = replace_one(
            txt, srctxt,
            '$$(PYTHON_DIR-$1)/dist/lib/libpython$(PYTHON_VER).a: ' +
            ('build/$2/Support/OpenSSL ' if ENABLE_OPENSSL else '') +
            'build/$2/Support/XZ $$(PYTHON_DIR-$1)/Makefile\n#' + srctxt)
    else:
        txt = replace_one(
            txt, srctxt,
            '$$(PYTHON_DIR-$1)/dist/lib/libpython$(PYTHON_VER)m.a: ' +
            ('build/$2/Support/OpenSSL ' if ENABLE_OPENSSL else '') +
            'build/$2/Support/XZ $$(PYTHON_DIR-$1)/Makefile\n#' + srctxt)
    srctxt = ('dist/Python-$(PYTHON_VER)-$1-support.'
              '$(BUILD_NUMBER).tar.gz: ')
    txt = replace_one(
        txt, srctxt,
        'dist/Python-$(PYTHON_VER)-$1-support.$(BUILD_NUMBER).tar.gz:'
        ' $$(PYTHON_FRAMEWORK-$1)\n#' + srctxt)

    # Turn doc strings on; looks like it only adds a few hundred k.
    txt = txt.replace('--without-doc-strings', '--with-doc-strings')

    # Set mac/ios version reqs
    # (see issue with utimensat and futimens).
    txt = replace_one(txt, 'MACOSX_DEPLOYMENT_TARGET=10.8',
                      'MACOSX_DEPLOYMENT_TARGET=10.15')

    # And equivalent iOS.
    txt = replace_one(txt, 'CFLAGS-iOS=-mios-version-min=8.0',
                      'CFLAGS-iOS=-mios-version-min=13.0')

    # Ditto for tvOS.
    txt = replace_one(txt, 'CFLAGS-tvOS=-mtvos-version-min=9.0',
                      'CFLAGS-tvOS=-mtvos-version-min=13.0')
    if debug:

        # Add debug build flag.
        # (Currently expect to find 2 instances of this.)
        dline = '--with-doc-strings --enable-ipv6 --without-ensurepip'
        splitlen = len(txt.split(dline))
        if splitlen != 3:
            raise Exception('unexpected configure lines')
        txt = txt.replace(dline, '--with-pydebug ' + dline)

        # Debug has a different name.
        # (Currently expect to replace 12 instances of this.)
        dline = 'python$(PYTHON_VER)' if PY38 else 'python$(PYTHON_VER)m'
        splitlen = len(txt.split(dline))
        if splitlen != 13:
            raise RuntimeError(f'Unexpected configure line count {splitlen}.')
        txt = txt.replace(
            dline,
            'python$(PYTHON_VER)d' if PY38 else 'python$(PYTHON_VER)dm')
    writefile('Makefile', txt)

    # Ok; let 'er rip.
    # (we run these in parallel so limit to 1 job a piece;
    # otherwise they inherit the -j12 or whatever from the top level)
    # (also this build seems to fail with multiple threads)
    run('make -j1 ' + {
        'mac': 'Python-macOS',
        'ios': 'Python-iOS',
        'tvos': 'Python-tvOS'
    }[arch])
    print('python build complete! (apple/' + arch + ')')
def gather() -> None:
    """Gather per-platform python headers, libs, and modules together.

    This assumes all embeddable py builds have been run successfully,
    and that PROJROOT is the cwd.
    """
    # pylint: disable=too-many-locals
    do_android = True

    # First off, clear out any existing output.
    existing_dirs = [
        os.path.join('src/external', d) for d in os.listdir('src/external')
        if d.startswith('python-') and d != 'python-notes.txt'
    ]
    existing_dirs += [
        os.path.join('assets/src', d) for d in os.listdir('assets/src')
        if d.startswith('pylib-')
    ]
    if not do_android:
        existing_dirs = [d for d in existing_dirs if 'android' not in d]
    for existing_dir in existing_dirs:
        run('rm -rf "' + existing_dir + '"')

    apost2 = f'src/Python-{PY_VER_EXACT_ANDROID}/Android/sysroot'
    for buildtype in ['debug', 'release']:
        debug = buildtype == 'debug'
        bsuffix = '_debug' if buildtype == 'debug' else ''
        bsuffix2 = '-debug' if buildtype == 'debug' else ''
        libname = 'python' + PYVER + ('d' if debug else '')
        bases = {
            'mac': f'build/python_apple_mac{bsuffix}/build/macOS',
            'ios': f'build/python_apple_ios{bsuffix}/build/iOS',
            'tvos': f'build/python_apple_tvos{bsuffix}/build/tvOS',
            'android_arm': f'build/python_android_arm{bsuffix}/build',
            'android_arm64': f'build/python_android_arm64{bsuffix}/build',
            'android_x86': f'build/python_android_x86{bsuffix}/build',
            'android_x86_64': f'build/python_android_x86_64{bsuffix}/build'
        }
        bases2 = {
            'android_arm': f'build/python_android_arm{bsuffix}/{apost2}',
            'android_arm64': f'build/python_android_arm64{bsuffix}/{apost2}',
            'android_x86': f'build/python_android_x86{bsuffix}/{apost2}',
            'android_x86_64': f'build/python_android_x86_64{bsuffix}/{apost2}'
        }

        # Note: only need pylib for the first in each group.
        builds: list[dict[str, Any]] = [{
            'name': 'macos',
            'group': 'apple',
            'headers': bases['mac'] + '/Support/Python/Headers',
            'libs': [
                bases['mac'] + '/Support/Python/libPython.a',
                bases['mac'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['mac'] + '/Support/XZ/libxz.a',
                bases['mac'] + '/Support/BZip2/libbzip2.a',
            ],
            'pylib':
                (bases['mac'] + f'/Python-{PY_VER_EXACT_APPLE}-macOS/lib'),
        }, {
            'name': 'ios',
            'group': 'apple',
            'headers': bases['ios'] + '/Support/Python/Headers',
            'libs': [
                bases['ios'] + '/Support/Python/libPython.a',
                bases['ios'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['ios'] + '/Support/XZ/libxz.a',
                bases['ios'] + '/Support/BZip2/libbzip2.a',
            ],
        }, {
            'name': 'tvos',
            'group': 'apple',
            'headers': bases['tvos'] + '/Support/Python/Headers',
            'libs': [
                bases['tvos'] + '/Support/Python/libPython.a',
                bases['tvos'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['tvos'] + '/Support/XZ/libxz.a',
                bases['tvos'] + '/Support/BZip2/libbzip2.a',
            ],
        }, {
            'name': 'android_arm',
            'group': 'android',
            'headers': bases['android_arm'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_arm'] + f'/usr/lib/lib{libname}.a',
                bases2['android_arm'] + '/usr/lib/libssl.a',
                bases2['android_arm'] + '/usr/lib/libcrypto.a',
                bases2['android_arm'] + '/usr/lib/liblzma.a',
                bases2['android_arm'] + '/usr/lib/libsqlite3.a',
                bases2['android_arm'] + '/usr/lib/libbz2.a',
                bases2['android_arm'] + '/usr/lib/libuuid.a',
            ],
            'libinst': 'android_armeabi-v7a',
            'pylib': (bases['android_arm'] + '/usr/lib/python' + PYVER),
        }, {
            'name': 'android_arm64',
            'group': 'android',
            'headers': bases['android_arm64'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_arm64'] + f'/usr/lib/lib{libname}.a',
                bases2['android_arm64'] + '/usr/lib/libssl.a',
                bases2['android_arm64'] + '/usr/lib/libcrypto.a',
                bases2['android_arm64'] + '/usr/lib/liblzma.a',
                bases2['android_arm64'] + '/usr/lib/libsqlite3.a',
                bases2['android_arm64'] + '/usr/lib/libbz2.a',
                bases2['android_arm64'] + '/usr/lib/libuuid.a',
            ],
            'libinst': 'android_arm64-v8a',
        }, {
            'name': 'android_x86',
            'group': 'android',
            'headers': bases['android_x86'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_x86'] + f'/usr/lib/lib{libname}.a',
                bases2['android_x86'] + '/usr/lib/libssl.a',
                bases2['android_x86'] + '/usr/lib/libcrypto.a',
                bases2['android_x86'] + '/usr/lib/liblzma.a',
                bases2['android_x86'] + '/usr/lib/libsqlite3.a',
                bases2['android_x86'] + '/usr/lib/libbz2.a',
                bases2['android_x86'] + '/usr/lib/libuuid.a',
            ],
            'libinst': 'android_x86',
        }, {
            'name': 'android_x86_64',
            'group': 'android',
            'headers': bases['android_x86_64'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_x86_64'] + f'/usr/lib/lib{libname}.a',
                bases2['android_x86_64'] + '/usr/lib/libssl.a',
                bases2['android_x86_64'] + '/usr/lib/libcrypto.a',
                bases2['android_x86_64'] + '/usr/lib/liblzma.a',
                bases2['android_x86_64'] + '/usr/lib/libsqlite3.a',
                bases2['android_x86_64'] + '/usr/lib/libbz2.a',
                bases2['android_x86_64'] + '/usr/lib/libuuid.a',
            ],
            'libinst': 'android_x86_64',
        }]
        for build in builds:
            grp = build['group']
            if not do_android and grp == 'android':
                continue
            builddir = f'src/external/python-{grp}{bsuffix2}'
            header_dst = os.path.join(builddir, 'include')
            lib_dst = os.path.join(builddir, 'lib')
            assets_src_dst = f'assets/src/pylib-{grp}'

            # Do some setup only once per group.
            if not os.path.exists(builddir):
                run('mkdir -p "' + builddir + '"')
                run('mkdir -p "' + lib_dst + '"')

                # Only pull modules into game assets on release pass.
                if not debug:

                    # Copy system modules into the src assets dir for
                    # this group.
                    run('mkdir -p "' + assets_src_dst + '"')
                    run('rsync --recursive --include "*.py"'
                        ' --exclude __pycache__ --include "*/" --exclude "*" "'
                        + build['pylib'] + '/" "' + assets_src_dst + '"')

                    # Prune a bunch of modules we don't need to cut
                    # down on size.
                    run('cd "' + assets_src_dst + '" && rm -rf ' +
                        ' '.join(PRUNE_LIB_NAMES))

                    # Some minor filtering to system scripts:
                    # on iOS/tvOS, addusersitepackages() leads to a
                    # crash due to _sysconfigdata_dm_ios_darwin module
                    # not existing, so let's skip that.
                    fname = f'{assets_src_dst}/site.py'
                    txt = readfile(fname)
                    txt = replace_one(
                        txt,
                        '    known_paths = addusersitepackages(known_paths)',
                        '    # efro tweak: this craps out on ios/tvos.\n'
                        '    # (and we don\'t use it anyway)\n'
                        '    # known_paths = '
                        'addusersitepackages(known_paths)')
                    writefile(fname, txt)

                # Copy in a base set of headers (everything in a group
                # should be using the same headers).
                run(f'cp -r "{build["headers"]}" "{header_dst}"')

                # Clear whatever pyconfigs came across; we'll build our
                # own universal one below.
                run('rm ' + header_dst + '/pyconfig*')

                # Write a master pyconfig header that reroutes to each
                # platform's actual header.
                with open(header_dst + '/pyconfig.h', 'w',
                          encoding='utf-8') as hfile:
                    hfile.write(
                        '#if BA_OSTYPE_MACOS\n'
                        '#include "pyconfig-macos.h"\n\n'
                        '#elif BA_OSTYPE_IOS\n'
                        '#include "pyconfig-ios.h"\n\n'
                        '#elif BA_OSTYPE_TVOS\n'
                        '#include "pyconfig-tvos.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__arm__)\n'
                        '#include "pyconfig-android_arm.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__aarch64__)\n'
                        '#include "pyconfig-android_arm64.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__i386__)\n'
                        '#include "pyconfig-android_x86.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__x86_64__)\n'
                        '#include "pyconfig-android_x86_64.h"\n\n'
                        '#else\n'
                        '#error unknown platform\n\n'
                        '#endif\n')

            # Now copy each build's config headers in with unique names.
            cfgs = [
                f for f in os.listdir(build['headers'])
                if f.startswith('pyconfig')
            ]

            # Copy config headers to their filtered names.
            for cfg in cfgs:
                out = cfg.replace('pyconfig', 'pyconfig-' + build['name'])
                if cfg == 'pyconfig.h':

                    # For platform's root pyconfig.h we need to filter
                    # contents too (those headers can themselves
                    # include others; ios for instance points to an
                    # arm64 and an x86_64 variant).
                    contents = readfile(build['headers'] + '/' + cfg)
                    contents = contents.replace(
                        'pyconfig', 'pyconfig-' + build['name'])
                    writefile(header_dst + '/' + out, contents)
                else:
                    # Other configs we just rename.
                    run('cp "' + build['headers'] + '/' + cfg + '" "' +
                        header_dst + '/' + out + '"')

            # Copy in libs. If the lib gave a specific install name,
            # use that; otherwise use name.
            targetdir = lib_dst + '/' + build.get('libinst', build['name'])
            run('rm -rf "' + targetdir + '"')
            run('mkdir -p "' + targetdir + '"')
            for lib in build['libs']:
                run('cp "' + lib + '" "' + targetdir + '"')
    print('Great success!')
def build_android(rootdir: str, arch: str, debug: bool = False) -> None:
    """Run a build for android with the given architecture.

    (can be arm, arm64, x86, or x86_64)
    """
    import subprocess
    builddir = 'build/python_android_' + arch + ('_debug' if debug else '')
    run('rm -rf "' + builddir + '"')
    run('mkdir -p build')
    run('git clone '
        'https://github.com/yan12125/python3-android.git "' + builddir + '"')
    os.chdir(builddir)

    # These builds require ANDROID_NDK to be set; make sure that's the
    # case.
    os.environ['ANDROID_NDK'] = subprocess.check_output(
        [f'{rootdir}/tools/pcommand', 'android_sdk_utils',
         'get-ndk-path']).decode().strip()

    # Disable builds for dependencies we don't use.
    ftxt = readfile('Android/build_deps.py')
    # ftxt = replace_one(ftxt, ' NCurses,\n',
    #                    '# NCurses,\n',)
    ftxt = replace_one(
        ftxt,
        ' '
        'BZip2, GDBM, LibFFI, LibUUID, OpenSSL, Readline, SQLite, XZ, ZLib,\n',
        ' '
        'BZip2, LibUUID, OpenSSL, SQLite, XZ, ZLib,\n',
    )

    # Older ssl seems to choke on newer ndk layouts.
    ftxt = replace_one(
        ftxt,
        "source = 'https://www.openssl.org/source/openssl-1.1.1h.tar.gz'",
        "source = 'https://www.openssl.org/source/openssl-1.1.1l.tar.gz'")

    # Give ourselves a handle to patch the OpenSSL build.
    ftxt = replace_one(
        ftxt,
        ' # OpenSSL handles NDK internal paths by itself',
        ' # Ericf addition: do some patching:\n'
        ' self.run(["../../../../../../../tools/pcommand",'
        ' "python_android_patch_ssl"])\n'
        ' # OpenSSL handles NDK internal paths by itself',
    )
    writefile('Android/build_deps.py', ftxt)

    # Tweak some things in the base build script; grab the right
    # version of Python and also inject some code to modify bits of
    # python after it is extracted.
    ftxt = readfile('build.sh')
    ftxt = replace_one(ftxt, 'PYVER=3.9.0', f'PYVER={PY_VER_EXACT_ANDROID}')
    ftxt = replace_one(
        ftxt, ' popd\n', f' ../../../tools/pcommand'
        f' python_android_patch Python-{PY_VER_EXACT_ANDROID}\n popd\n')
    writefile('build.sh', ftxt)

    # Ok, let 'er rip.
    exargs = ' --with-pydebug' if debug else ''
    run(f'ARCH={arch} ANDROID_API=21 ./build.sh{exargs}')
    print('python build complete! (android/' + arch + ')')
def gather() -> None:
    """Gather per-platform python headers, libs, and modules together.

    This assumes all embeddable py builds have been run successfully,
    and that PROJROOT is the cwd.
    """
    # pylint: disable=too-many-locals

    # First off, clear out any existing output.
    existing_dirs = [
        os.path.join('src/external', d) for d in os.listdir('src/external')
        if d.startswith('python-') and d != 'python-notes.txt'
    ]
    existing_dirs += [
        os.path.join('assets/src', d) for d in os.listdir('assets/src')
        if d.startswith('pylib-')
    ]
    for existing_dir in existing_dirs:
        efrotools.run('rm -rf "' + existing_dir + '"')

    # Build our set of site-packages that we'll bundle in addition to
    # the base system.
    # FIXME: Should we perhaps make this part more explicit?..
    # We might get unexpected changes sneaking in if we're just pulling
    # from installed python. But then again, anytime we're doing a new
    # python build/gather we should expect *some* changes even if only
    # at the build-system level since we pull some of that directly
    # from latest git stuff.
    efrotools.run('mkdir -p "assets/src/pylib-site-packages"')
    efrotools.run('cp "/usr/local/lib/python' + PYTHON_VERSION_MAJOR +
                  '/site-packages/typing_extensions.py"'
                  ' "assets/src/pylib-site-packages/"')

    for buildtype in ['debug', 'release']:
        debug = buildtype == 'debug'
        bsuffix = '_debug' if buildtype == 'debug' else ''
        bsuffix2 = '-debug' if buildtype == 'debug' else ''
        libname = 'python' + PYTHON_VERSION_MAJOR + ('dm' if debug else 'm')
        bases = {
            'mac': f'build/python_apple_mac{bsuffix}/build/macOS',
            'ios': f'build/python_apple_ios{bsuffix}/build/iOS',
            'tvos': f'build/python_apple_tvos{bsuffix}/build/tvOS',
            'android_arm':
                f'build/python_android_arm{bsuffix}/build/sysroot',
            'android_arm64':
                f'build/python_android_arm64{bsuffix}/build/sysroot',
            'android_x86':
                f'build/python_android_x86{bsuffix}/build/sysroot',
            'android_x86_64':
                f'build/python_android_x86_64{bsuffix}/build/sysroot'
        }

        # Note: only need pylib for the first in each group.
        builds: List[Dict[str, Any]] = [{
            'name': 'macos',
            'group': 'apple',
            'headers': bases['mac'] + '/Support/Python/Headers',
            'libs': [
                bases['mac'] + '/Support/Python/libPython.a',
                bases['mac'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['mac'] + '/Support/XZ/libxz.a'
            ],
            'pylib':
                (bases['mac'] + '/python/lib/python' + PYTHON_VERSION_MAJOR),
        }, {
            'name': 'ios',
            'group': 'apple',
            'headers': bases['ios'] + '/Support/Python/Headers',
            'libs': [
                bases['ios'] + '/Support/Python/libPython.a',
                bases['ios'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['ios'] + '/Support/XZ/libxz.a'
            ],
        }, {
            'name': 'tvos',
            'group': 'apple',
            'headers': bases['tvos'] + '/Support/Python/Headers',
            'libs': [
                bases['tvos'] + '/Support/Python/libPython.a',
                bases['tvos'] + '/Support/OpenSSL/libOpenSSL.a',
                bases['tvos'] + '/Support/XZ/libxz.a'
            ],
        }, {
            'name': 'android_arm',
            'group': 'android',
            'headers': bases['android_arm'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_arm'] + f'/usr/lib/lib{libname}.a',
                bases['android_arm'] + '/usr/lib/libssl.a',
                bases['android_arm'] + '/usr/lib/libcrypto.a',
                bases['android_arm'] + '/usr/lib/liblzma.a',
                bases['android_arm'] + '/usr/lib/libsqlite3.a'
            ],
            'libinst': 'android_armeabi-v7a',
            'pylib': (bases['android_arm'] + '/usr/lib/python' +
                      PYTHON_VERSION_MAJOR),
        }, {
            'name': 'android_arm64',
            'group': 'android',
            'headers': bases['android_arm64'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_arm64'] + f'/usr/lib/lib{libname}.a',
                bases['android_arm64'] + '/usr/lib/libssl.a',
                bases['android_arm64'] + '/usr/lib/libcrypto.a',
                bases['android_arm64'] + '/usr/lib/liblzma.a',
                bases['android_arm64'] + '/usr/lib/libsqlite3.a'
            ],
            'libinst': 'android_arm64-v8a',
        }, {
            'name': 'android_x86',
            'group': 'android',
            'headers': bases['android_x86'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_x86'] + f'/usr/lib/lib{libname}.a',
                bases['android_x86'] + '/usr/lib/libssl.a',
                bases['android_x86'] + '/usr/lib/libcrypto.a',
                bases['android_x86'] + '/usr/lib/liblzma.a',
                bases['android_x86'] + '/usr/lib/libsqlite3.a'
            ],
            'libinst': 'android_x86',
        }, {
            'name': 'android_x86_64',
            'group': 'android',
            'headers': bases['android_x86_64'] + f'/usr/include/{libname}',
            'libs': [
                bases['android_x86_64'] + f'/usr/lib/lib{libname}.a',
                bases['android_x86_64'] + '/usr/lib/libssl.a',
                bases['android_x86_64'] + '/usr/lib/libcrypto.a',
                bases['android_x86_64'] + '/usr/lib/liblzma.a',
                bases['android_x86_64'] + '/usr/lib/libsqlite3.a'
            ],
            'libinst': 'android_x86_64',
        }]
        for build in builds:
            grp = build['group']
            builddir = f'src/external/python-{grp}{bsuffix2}'
            header_dst = os.path.join(builddir, 'include')
            lib_dst = os.path.join(builddir, 'lib')
            assets_src_dst = f'assets/src/pylib-{grp}'

            # Do some setup only once per group.
            if not os.path.exists(builddir):
                efrotools.run('mkdir -p "' + builddir + '"')
                efrotools.run('mkdir -p "' + lib_dst + '"')

                # Only pull modules into game assets on release pass.
                if not debug:

                    # Copy system modules into the src assets dir for
                    # this group.
                    efrotools.run('mkdir -p "' + assets_src_dst + '"')
                    efrotools.run(
                        'rsync --recursive --include "*.py"'
                        ' --exclude __pycache__ --include "*/" --exclude "*" "'
                        + build['pylib'] + '/" "' + assets_src_dst + '"')

                    # Prune a bunch of modules we don't need to cut
                    # down on size.
                    prune = [
                        'config-*', 'idlelib', 'lib-dynload', 'lib2to3',
                        'multiprocessing', 'pydoc_data', 'site-packages',
                        'ensurepip', 'tkinter', 'wsgiref', 'distutils',
                        'turtle.py', 'turtledemo', 'test', 'sqlite3/test',
                        'unittest', 'dbm', 'venv', 'ctypes/test',
                        'imaplib.py', '_sysconfigdata_*'
                    ]
                    efrotools.run('cd "' + assets_src_dst + '" && rm -rf ' +
                                  ' '.join(prune))

                    # Some minor filtering to system scripts:
                    # on iOS/tvOS, addusersitepackages() leads to a
                    # crash due to _sysconfigdata_dm_ios_darwin module
                    # not existing, so let's skip that.
                    fname = f'{assets_src_dst}/site.py'
                    txt = efrotools.readfile(fname)
                    txt = efrotools.replace_one(
                        txt,
                        '    known_paths = addusersitepackages(known_paths)',
                        '    # efro tweak: this craps out on ios/tvos.\n'
                        '    # (and we don\'t use it anyway)\n'
                        '    # known_paths = '
                        'addusersitepackages(known_paths)')
                    efrotools.writefile(fname, txt)

                # Copy in a base set of headers (everything in a group
                # should be using the same headers).
                efrotools.run(f'cp -r "{build["headers"]}" "{header_dst}"')

                # Clear whatever pyconfigs came across; we'll build our
                # own universal one below.
                efrotools.run('rm ' + header_dst + '/pyconfig*')

                # Write a master pyconfig header that reroutes to each
                # platform's actual header.
                with open(header_dst + '/pyconfig.h', 'w') as hfile:
                    hfile.write(
                        '#if BA_OSTYPE_MACOS\n'
                        '#include "pyconfig-macos.h"\n\n'
                        '#elif BA_OSTYPE_IOS\n'
                        '#include "pyconfig-ios.h"\n\n'
                        '#elif BA_OSTYPE_TVOS\n'
                        '#include "pyconfig-tvos.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__arm__)\n'
                        '#include "pyconfig-android_arm.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__aarch64__)\n'
                        '#include "pyconfig-android_arm64.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__i386__)\n'
                        '#include "pyconfig-android_x86.h"\n\n'
                        '#elif BA_OSTYPE_ANDROID and defined(__x86_64__)\n'
                        '#include "pyconfig-android_x86_64.h"\n\n'
                        '#else\n'
                        '#error unknown platform\n\n'
                        '#endif\n')

            # Now copy each build's config headers in with unique names.
            cfgs = [
                f for f in os.listdir(build['headers'])
                if f.startswith('pyconfig')
            ]

            # Copy config headers to their filtered names.
            for cfg in cfgs:
                out = cfg.replace('pyconfig', 'pyconfig-' + build['name'])
                if cfg == 'pyconfig.h':

                    # For platform's root pyconfig.h we need to filter
                    # contents too (those headers can themselves
                    # include others; ios for instance points to an
                    # arm64 and an x86_64 variant).
                    contents = efrotools.readfile(build['headers'] + '/' +
                                                  cfg)
                    contents = contents.replace(
                        'pyconfig', 'pyconfig-' + build['name'])
                    efrotools.writefile(header_dst + '/' + out, contents)
                else:
                    # Other configs we just rename.
                    efrotools.run('cp "' + build['headers'] + '/' + cfg +
                                  '" "' + header_dst + '/' + out + '"')

            # Copy in libs. If the lib gave a specific install name,
            # use that; otherwise use name.
            targetdir = lib_dst + '/' + build.get('libinst', build['name'])
            efrotools.run('rm -rf "' + targetdir + '"')
            efrotools.run('mkdir -p "' + targetdir + '"')
            for lib in build['libs']:
                efrotools.run('cp "' + lib + '" "' + targetdir + '"')
    print('Great success!')
def get_target(path: str) -> None:
    """Fetch a target path from the cache, downloading if need be."""
    from efro.error import CleanError
    from efrotools import run
    path = _project_centric_path(path)
    with open(CACHE_MAP_NAME, encoding='utf-8') as infile:
        efrocachemap = json.loads(infile.read())
    if path not in efrocachemap:
        raise RuntimeError(f'Path not found in efrocache: {path}')
    url = efrocachemap[path]
    subpath = '/'.join(url.split('/')[-3:])
    local_cache_path = os.path.join(CACHE_DIR_NAME, subpath)
    local_cache_path_dl = local_cache_path + '.download'
    hashval = ''.join(subpath.split('/'))

    # First off: if there's already a file in place, check its hash.
    # If it matches the cache, we can just update its timestamp and
    # call it a day.
    if os.path.isfile(path):
        existing_hash = get_file_hash(path)
        if existing_hash == hashval:
            os.utime(path, None)
            print(f'Refreshing from cache: {path}')
            return

    # Ok there's not a valid file in place already.
    # Clear out whatever is there to start with.
    if os.path.exists(path):
        os.unlink(path)

    # Now if we don't have this entry in our local cache, download it.
    if not os.path.exists(local_cache_path):
        os.makedirs(os.path.dirname(local_cache_path), exist_ok=True)
        print(f'Downloading: {Clr.BLU}{path}{Clr.RST}')
        result = subprocess.run(
            f'curl --fail --silent {url} --output {local_cache_path_dl}',
            shell=True,
            check=False)

        # We prune old cache files on the server, so it's possible for
        # one to be trying to build something the server can no longer
        # provide. Try to explain the situation.
        if result.returncode == 22:
            raise CleanError('Server gave an error.'
                             ' Old build files may no longer be available;'
                             ' make sure you are using a recent commit.')
        if result.returncode != 0:
            raise CleanError('Download failed; is your internet working?')
        run(f'mv {local_cache_path_dl} {local_cache_path}')

    # Ok we should have a valid .tar.gz file in our cache dir at this
    # point. Just expand it and it gets placed wherever it belongs.
    #
    # Strangely, decompressing lots of these simultaneously leads to
    # occasional "File does not exist" errors when running on Windows
    # Subsystem for Linux. There should be no overlap in files getting
    # written, but perhaps something about how tar rebuilds the
    # directory structure causes clashes. It seems that just explicitly
    # creating necessary directories first prevents the problem.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    print(f'Extracting: {path}')
    try:
        subprocess.run(['tar', '-zxf', local_cache_path], check=True)
    except Exception:
        # If something goes wrong, try to make sure we don't leave a
        # half-decompressed file lying around or whatnot.
        print(f"Error expanding cache archive for '{local_cache_path}'.")
        if os.path.exists(local_cache_path):
            os.remove(local_cache_path)
        raise

    # The file will wind up with the timestamp it was compressed with,
    # so let's update its timestamp or else it will still be considered
    # dirty.
    run(f'touch {path}')
    if not os.path.exists(path):
        raise RuntimeError(f'File {path} did not wind up as expected.')
def build_android(rootdir: str, arch: str, debug: bool = False) -> None:
    """Run a build for android with the given architecture.

    (can be arm, arm64, x86, or x86_64)
    """
    # pylint: disable=too-many-statements
    import subprocess
    import platform
    builddir = 'build/python_android_' + arch + ('_debug' if debug else '')
    run('rm -rf "' + builddir + '"')
    run('mkdir -p build')
    run('git clone '
        'https://github.com/yan12125/python3-android.git "' + builddir + '"')
    os.chdir(builddir)

    # It seems we now need 'autopoint' as part of this build, but on
    # mac it is not available on the normal path, but only as part of
    # the keg-only gettext homebrew formula.
    if platform.system() == 'Darwin':
        if (subprocess.run('which autopoint', shell=True,
                           check=False).returncode != 0):
            print('Updating path for mac autopoint...')
            appath = subprocess.run('brew ls gettext | grep bin/autopoint',
                                    shell=True,
                                    check=True,
                                    capture_output=True)
            appathout = os.path.dirname(appath.stdout.decode().strip())
            os.environ['PATH'] += (':' + appathout)
            print(f'ADDED "{appathout}" TO SYS PATH...')

    # Commit from Jan 8, 2020. Right after this, the build system was
    # switched to a completely new minimal one which will take some
    # work to update here. Punting on that for now... (tentative plan
    # is to try and adopt the new one when we update for Python 3.10 in
    # a year or two).
    if True:  # pylint: disable=using-constant-test
        run('git checkout 9adbcfaca37f40b7a86381f83f0f6af4187233ae')
    ftxt = readfile('pybuild/env.py')

    # Set the packages we build.
    ftxt = replace_one(
        ftxt, 'packages = (', "packages = ('zlib', 'sqlite', 'xz'," +
        (" 'openssl'" if ENABLE_OPENSSL else '') + ')\n# packages = (')

    # Don't wanna bother with gpg signing stuff.
    ftxt = replace_one(ftxt, 'verify_source = True',
                       'verify_source = False')

    # Sub in the min api level we're targeting.
    ftxt = replace_one(ftxt, 'android_api_level = 21',
                       'android_api_level = 21')
    ftxt = replace_one(ftxt, "target_arch = 'arm'",
                       "target_arch = '" + arch + "'")
    writefile('pybuild/env.py', ftxt)
    ftxt = readfile('Makefile')

    # This needs to be python3 for us.
    ftxt = replace_one(ftxt, 'PYTHON?=python\n', 'PYTHON?=python3\n')
    writefile('Makefile', ftxt)
    ftxt = readfile('pybuild/packages/python.py')

    # We currently build as a static lib.
    ftxt = replace_one(ftxt, " '--enable-shared',\n", '')
    ftxt = replace_one(
        ftxt, "super().__init__('https://github.com/python/cpython/')",
        "super().__init__('https://github.com/python/cpython/'"
        f", branch='{PYVER}')")

    # Turn ipv6 on (curious why it's turned off here?...)
    # Also, turn on low level debugging features for our debug builds.
    ftxt = replace_one(ftxt, "'--disable-ipv6',", "'--enable-ipv6',")
    if debug:
        ftxt = replace_one(ftxt, "'./configure',",
                           "'./configure', '--with-pydebug',")

    # We don't use this stuff so let's strip it out to simplify.
    ftxt = replace_one(ftxt, "'--without-ensurepip',", '')

    # This builds all modules as dynamic libs, but we want to be
    # consistent with our other embedded builds and just static-build
    # the ones we need... so to change that we'll need to add a hook
    # for ourself after python is downloaded but before it is built so
    # we can muck with it.
    ftxt = replace_one(
        ftxt, '    def build(self):', '    def build(self):\n'
        '        import os\n'
        '        if os.system(\'"' + rootdir +
        '/tools/pcommand" python_android_patch "' + os.getcwd() +
        '"\') != 0: raise Exception("patch apply failed")')
    writefile('pybuild/packages/python.py', ftxt)

    # Set these to particular releases to use those.
    # py_commit = '580fbb018fd0844806119614d752b41fc69660f9'  # 3.8.5
    py_commit = '6503f05dd59e26a9986bdea097b3da9b3546f45b'  # 3.8.7

    # cpython-source-deps stuff started failing for OpenSSL on
    # Jan 8 2021. Pinning it to an older one for now.
    py_ssl_commit = '7f34c3085feb4692bbbb6c8b19d053ebc5049dad'  # From 6/12/20

    commit_lines = ''
    if py_commit is not None:
        commit_lines += ('        if self.source_url == '
                         "'https://github.com/python/cpython/':\n"
                         "            run_in_dir(['git', 'checkout', '" +
                         py_commit + "'], self.source_dir)\n")
    if py_ssl_commit is not None:
        commit_lines += ('        if self.source_url == '
                         "'https://github.com/python/cpython-source-deps'"
                         " and self.branch == 'openssl-1.1.1':\n"
                         "            run_in_dir(['git', 'checkout', '" +
                         py_ssl_commit + "'], self.source_dir)\n")

    ftxt = readfile('pybuild/source.py')

    # Check out a particular commit right after the clone.
    ftxt = replace_one(
        ftxt, "'git', 'clone', '--single-branch', '-b',"
        ' self.branch, self.source_url, self.dest])',
        "'git', 'clone', '-b',"
        ' self.branch, self.source_url, self.dest])\n'
        '        # efro: hack to get exact commits we want.\n'
        "        print('DOING URL', self.source_url)\n" + commit_lines)
    writefile('pybuild/source.py', ftxt)
    ftxt = readfile('pybuild/util.py')

    # Still don't wanna bother with gpg signing stuff.
    ftxt = replace_one(
        ftxt, 'def gpg_verify_file(sig_filename, filename, validpgpkeys):\n',
        'def gpg_verify_file(sig_filename, filename, validpgpkeys):\n'
        '    print("gpg-verify disabled by ericf")\n'
        '    return\n')
    writefile('pybuild/util.py', ftxt)

    # These builds require ANDROID_NDK to be set, so make sure that's
    # the case.
    os.environ['ANDROID_NDK'] = subprocess.check_output(
        [rootdir + '/tools/android_sdk_utils',
         'get-ndk-path']).decode().strip()

    # Ok, let 'er rip.
    # (we often run these builds in parallel so limit to 1 job a piece;
    # otherwise they each inherit the -j12 or whatever from the top
    # level).
    run('make -j1')
    print('python build complete! (android/' + arch + ')')
def update_cache(makefile_dirs: List[str]) -> None:
    """Given a list of directories containing makefiles, update caches."""
    import multiprocessing
    from efrotools import run
    cpus = multiprocessing.cpu_count()
    fnames1: List[str] = []
    fnames2: List[str] = []
    for path in makefile_dirs:
        cdp = f'cd {path} && ' if path else ''

        # First, make sure all cache files are built.
        mfpath = os.path.join(path, 'Makefile')
        print(f'Building cache targets for {mfpath}...')
        subprocess.run(f'{cdp}make -j{cpus} efrocache-build',
                       shell=True,
                       check=True)
        rawpaths = subprocess.run(f'{cdp}make efrocache-list',
                                  shell=True,
                                  check=True,
                                  capture_output=True).stdout.decode().split()

        # Make sure the paths they gave were relative.
        for rawpath in rawpaths:
            if rawpath.startswith('/'):
                raise RuntimeError(f'Invalid path returned for caching '
                                   f'(absolute paths not allowed): {rawpath}')

        # Break these into 2 lists, one of which will be included in
        # the starter-cache.
        for rawpath in rawpaths:
            fullpath = os.path.join(path, rawpath)

            # The main reason for this cache is to reduce round trips
            # to the staging server for tiny files, so let's include
            # small files only here. For larger stuff it's ok to have a
            # request per file.
            if os.path.getsize(fullpath) < 100000:
                fnames1.append(fullpath)
            else:
                fnames2.append(fullpath)
    staging_dir = 'build/efrocache'
    mapping_file = 'build/efrocachemap'
    run(f'rm -rf {staging_dir}')
    run(f'mkdir -p {staging_dir}')
    _write_cache_files(fnames1, fnames2, staging_dir, mapping_file)
    print(f'Starter cache includes {len(fnames1)} items;'
          f' excludes {len(fnames2)}')

    # Sync all individual cache files to the staging server.
    print('Pushing cache to staging...', flush=True)
    run('rsync --progress --recursive build/efrocache/'
        ' [email protected]:files.ballistica.net/cache/ba1/')

    # Now generate the starter cache on the server..
    run('ssh -oBatchMode=yes -oStrictHostKeyChecking=yes [email protected]'
        ' "cd files.ballistica.net/cache/ba1 && python3 genstartercache.py"')
    print('Cache update successful!')
def get_target(path: str) -> None:
    """Fetch a target path from the cache, downloading if need be."""
    from efro.error import CleanError
    from efrotools import run
    with open(CACHE_MAP_NAME) as infile:
        efrocachemap = json.loads(infile.read())
    if path not in efrocachemap:
        raise RuntimeError(f'Path not found in efrocache: {path}')
    url = efrocachemap[path]
    subpath = '/'.join(url.split('/')[-3:])
    local_cache_path = os.path.join(CACHE_DIR_NAME, subpath)
    local_cache_path_dl = local_cache_path + '.download'
    hashval = ''.join(subpath.split('/'))

    # First off: if there's already a file in place, check its hash.
    # If it matches the cache, we can just update its timestamp and
    # call it a day.
    if os.path.isfile(path):
        existing_hash = get_file_hash(path)
        if existing_hash == hashval:
            os.utime(path, None)
            print(f'Refreshing from cache: {path}')
            return

    # Ok there's not a valid file in place already.
    # Clear out whatever is there to start with.
    if os.path.exists(path):
        os.unlink(path)

    # Now if we don't have this entry in our local cache, download it.
    if not os.path.exists(local_cache_path):
        os.makedirs(os.path.dirname(local_cache_path), exist_ok=True)
        print(f'Downloading: {Clr.SBLU}{path}{Clr.RST}')
        result = subprocess.run(
            f'curl --fail --silent {url} --output {local_cache_path_dl}',
            shell=True,
            check=False)

        # We prune old cache files on the server, so it's possible for
        # one to be trying to build something the server can no longer
        # provide. Try to explain the situation.
        if result.returncode == 22:
            raise CleanError('Server gave an error.'
                             ' Old build files may no longer be available;'
                             ' make sure you are using a recent commit.')
        if result.returncode != 0:
            raise CleanError('Download failed; is your internet working?')
        run(f'mv {local_cache_path_dl} {local_cache_path}')

    # Ok we should have a valid .tar.gz file in our cache dir at this
    # point. Just expand it and it gets placed wherever it belongs.
    print(f'Extracting: {path}')
    run(f'tar -zxf {local_cache_path}')

    # The file will wind up with the timestamp it was compressed with,
    # so let's update its timestamp or else it will still be considered
    # dirty.
    run(f'touch {path}')
    if not os.path.exists(path):
        raise RuntimeError(f'File {path} did not wind up as expected.')