def prepare_build_dir(self, arch):
    """Copy the recipe's local sources into a fresh build dir.

    Raises BuildInterruptingException when ``src_filename`` is unset,
    matching the error handling of the other ``prepare_build_dir``
    implementation in this file (the old ``print`` + ``exit(1)`` killed
    the whole process and bypassed p4a's error reporting).
    """
    if self.src_filename is None:
        raise BuildInterruptingException(
            'IncludedFilesBehaviour failed: no src_filename specified')
    # Start from a clean slate, then copy sources recursively,
    # preserving attributes (-a).
    shprint(sh.rm, '-rf', self.get_build_dir(arch))
    shprint(sh.cp, '-a', join(self.get_recipe_dir(), self.src_filename),
            self.get_build_dir(arch))
def install_hostpython_package(self, arch):
    """Install this package into the host python's Lib/site-packages."""
    env = self.get_hostrecipe_env(arch)
    hostpython = sh.Command(self.real_hostpython_location)
    install_root = dirname(self.real_hostpython_location)
    shprint(hostpython, 'setup.py', 'install', '-O2',
            '--root={}'.format(install_root),
            '--install-lib=Lib/site-packages',
            _env=env, *self.setup_extra_args)
def build_arch(self, arch): recipe_build_dir = self.get_build_dir(arch.arch) # Create a subdirectory to actually perform the build build_dir = join(recipe_build_dir, self.build_subdir) ensure_dir(build_dir) if not exists(join(build_dir, 'python')): with current_directory(recipe_build_dir): # Configure the build with current_directory(build_dir): if not exists('config.status'): shprint( sh.Command(join(recipe_build_dir, 'configure'))) # Create the Setup file. This copying from Setup.dist # seems to be the normal and expected procedure. shprint(sh.cp, join('Modules', 'Setup.dist'), join(build_dir, 'Modules', 'Setup')) result = shprint(sh.make, '-C', build_dir) else: info('Skipping {name} ({version}) build, as it has already ' 'been completed'.format(name=self.name, version=self.version)) self.ctx.hostpython = join(build_dir, 'python')
def rebuild_compiled_components(self, arch, env):
    """Force a clean rebuild of this package's compiled extensions."""
    info('Rebuilding compiled components in {}'.format(self.name))
    python_cmd = sh.Command(self.real_hostpython_location)
    # Wipe all previous build artefacts first, then rebuild verbosely.
    shprint(python_cmd, 'setup.py', 'clean', '--all', _env=env)
    shprint(python_cmd, 'setup.py', self.build_cmd, '-v',
            _env=env, *self.setup_extra_args)
def install_libs(self, arch, *libs):
    """Copy the given libraries into this arch's libs directory."""
    libs_dir = self.ctx.get_libs_dir(arch.arch)
    if not libs:
        warning('install_libs called with no libraries to install!')
        return
    # Single cp invocation: every library path followed by the target dir.
    shprint(sh.cp, *(libs + (libs_dir,)))
def prebuild_arch(self, arch):
    """Locate a prebuilt libvlc AAR bundle, or prepare to build one
    from the VLC android port sources."""
    super(VlcRecipe, self).prebuild_arch(arch)
    build_dir = self.get_build_dir(arch.arch)
    port_dir = join(build_dir, 'vlc-port-android')
    if self.ENV_LIBVLC_AAR in environ:
        # User supplied a prebuilt AAR: either the file itself or a
        # directory containing libvlc-<version>.aar.
        aar = environ.get(self.ENV_LIBVLC_AAR)
        if isdir(aar):
            aar = join(aar, 'libvlc-{}.aar'.format(self.version))
        if not isfile(aar):
            warning("Error: {} is not valid libvlc-<ver>.aar bundle".format(aar))
            info("check {} environment!".format(self.ENV_LIBVLC_AAR))
            exit(1)
        self.aars[arch] = aar
    else:
        # No prebuilt bundle: record where the AAR will appear after
        # compiling the port sources, and make sure those sources exist.
        aar_path = join(port_dir, 'libvlc', 'build', 'outputs', 'aar')
        self.aars[arch] = aar = join(
            aar_path, 'libvlc-{}.aar'.format(self.version))
        warning("HINT: set path to precompiled libvlc-<ver>.aar bundle "
                "in {} environment!".format(self.ENV_LIBVLC_AAR))
        info("libvlc-<ver>.aar should build "
             "from sources at {}".format(port_dir))
        # compile.sh marks an already-cloned port checkout.
        if not isfile(join(port_dir, 'compile.sh')):
            info("clone vlc port for android sources from {}".format(
                self.port_git))
            shprint(sh.git, 'clone', self.port_git, port_dir,
                    _tail=20, _critical=True)
def prepare_build_dir(self, arch):
    """Populate a clean build dir from the recipe's local sources."""
    if self.src_filename is None:
        raise BuildInterruptingException(
            'IncludedFilesBehaviour failed: no src_filename specified')
    target = self.get_build_dir(arch)
    # Remove any stale build dir, then copy sources with attributes
    # preserved (-a is recursive).
    shprint(sh.rm, '-rf', target)
    shprint(sh.cp, '-a',
            join(self.get_recipe_dir(), self.src_filename), target)
def run_pymodules_install(ctx, modules):
    """Install pure-Python requirements (those without recipes) via pip.

    Bug fix: ``filter()`` returns a lazy iterator on Python 3 which is
    always truthy, so ``if not modules`` never fired and the later
    ``', '.join(modules)`` consumed the iterator. Materialise the
    filtered modules as a list first (as the newer variant of this
    function does).
    """
    modules = list(filter(ctx.not_has_package, modules))
    if not modules:
        info('There are no Python modules to install, skipping')
        return
    info('The requirements ({}) don\'t have recipes, attempting to install '
         'them with pip'.format(', '.join(modules)))
    info('If this fails, it may mean that the module has compiled '
         'components and needs a recipe.')
    venv = sh.Command(ctx.virtualenv)
    with current_directory(join(ctx.build_dir)):
        shprint(venv, '--python=python2.7', 'venv')
        info('Creating a requirements.txt file for the Python modules')
        with open('requirements.txt', 'w') as fileh:
            for module in modules:
                fileh.write('{}\n'.format(module))
        info('Installing Python modules with pip')
        info('If this fails with a message about /bin/false, this '
             'probably means the package cannot be installed with '
             'pip as it needs a compilation recipe.')
        # This bash method is what old-p4a used
        # It works but should be replaced with something better
        shprint(sh.bash, '-c', (
            "source venv/bin/activate && env CC=/bin/false CXX=/bin/false "
            "PYTHONPATH={0} pip install --target '{0}' --no-deps -r requirements.txt"
        ).format(ctx.get_site_packages_dir()))
def build_arch(self, arch):
    """Expose libc under the librt name so `-lrt` links on Android.

    Bionic (Android's libc) already provides the librt symbols, so a
    temp dir with librt.so/librt.a symlinked to libc is added to the
    global link path.
    """
    tempdir = join(self.get_build_dir(arch.arch),
                   "p4a-librt-recipe-tempdir")
    if not exists(tempdir):
        makedirs(tempdir)
    # Recreate the symlinks on every run so they always track the
    # current libc of this build.
    for suffix in ('.so', '.a'):
        link_name = join(tempdir, "librt" + suffix)
        if exists(link_name):
            remove(link_name)
        shprint(sh.ln, '-sf', self.libc_path + suffix, link_name)
    # Register the temp dir as a -L path for all recipes, only once.
    if tempdir not in arch.extra_global_link_paths:
        arch.extra_global_link_paths.append(tempdir)
def prebuild_arch(self, arch):
    """Patch reportlab and point its setup.py at our freetype build.

    Downloads the PFB font archive the reportlab build expects, then
    substitutes the freetype lib/include placeholders in setup.py.

    Bug fix: setup.py is read in binary mode, so ``bytes.replace``
    must be given bytes arguments — the original passed str values,
    which raises TypeError on Python 3.
    """
    if not self.is_patched(arch):
        super(ReportLabRecipe, self).prebuild_arch(arch)
        self.apply_patch('patches/fix-setup.patch', arch.arch)
        recipe_dir = self.get_build_dir(arch.arch)
        shprint(sh.touch, os.path.join(recipe_dir, '.patched'))
        ft = self.get_recipe('freetype', self.ctx)
        ft_dir = ft.get_build_dir(arch.arch)
        # _FT_LIB_/_FT_INC_ env vars override the freetype build paths.
        ft_lib_dir = os.environ.get('_FT_LIB_',
                                    os.path.join(ft_dir, 'objs', '.libs'))
        ft_inc_dir = os.environ.get('_FT_INC_',
                                    os.path.join(ft_dir, 'include'))
        tmp_dir = os.path.normpath(
            os.path.join(recipe_dir, "..", "..", "tmp"))
        info('reportlab recipe: recipe_dir={}'.format(recipe_dir))
        info('reportlab recipe: tmp_dir={}'.format(tmp_dir))
        info('reportlab recipe: ft_dir={}'.format(ft_dir))
        info('reportlab recipe: ft_lib_dir={}'.format(ft_lib_dir))
        info('reportlab recipe: ft_inc_dir={}'.format(ft_inc_dir))
        with current_directory(recipe_dir):
            sh.ls('-lathr')
            ensure_dir(tmp_dir)
            pfbfile = os.path.join(tmp_dir, "pfbfer-20070710.zip")
            if not os.path.isfile(pfbfile):
                sh.wget("http://www.reportlab.com/ftp/pfbfer-20070710.zip",
                        "-O", pfbfile)
            sh.unzip("-u", "-d",
                     os.path.join(recipe_dir, "src", "reportlab", "fonts"),
                     pfbfile)
            if os.path.isfile("setup.py"):
                with open('setup.py', 'rb') as f:
                    # The file content is bytes; encode the replacement
                    # paths so the substitution works on Python 3.
                    text = f.read().replace(
                        b'_FT_LIB_', ft_lib_dir.encode()).replace(
                        b'_FT_INC_', ft_inc_dir.encode())
                with open('setup.py', 'wb') as f:
                    f.write(text)
def build_arch(self, arch):
    """Build and install pygame, then strip the built object files.

    Improvements: the identical jni path was formatted three separate
    times (now hoisted and appended once, producing the identical
    CFLAGS string), and the unused local ``python_install_path`` was
    removed.
    """
    env = self.get_recipe_env(arch)
    jni_path = join(self.ctx.bootstrap.build_dir, 'jni')
    # Same final CFLAGS string as the original three appends.
    env['CFLAGS'] = env['CFLAGS'] + (
        ' -I{jni_path}/png -I{jni_path}/jpeg'
        ' -I{jni_path}/sdl/include -I{jni_path}/sdl_mixer'
        ' -I{jni_path}/sdl_ttf -I{jni_path}/sdl_image'
    ).format(jni_path=jni_path)
    debug('pygame cflags', env['CFLAGS'])

    env['LDFLAGS'] = env['LDFLAGS'] + (
        ' -L{libs_path} -L{src_path}/obj/local/{arch} -lm -lz'.format(
            libs_path=self.ctx.libs_dir,
            src_path=self.ctx.bootstrap.build_dir,
            arch=env['ARCH']))
    env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')

    with current_directory(self.get_build_dir(arch.arch)):
        info('hostpython is ' + self.ctx.hostpython)
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
                _tail=10, _critical=True)
        info('strip is ' + env['STRIP'])
        build_lib = glob.glob('./build/lib*')
        assert len(build_lib) == 1
        print('stripping pygame')
        shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
                env['STRIP'], '{}', ';')
    warning('Should remove pygame tests etc. here, but skipping for now')
def strip_libraries(self, arch):
    """Run the NDK strip tool over every .so bundled in the dist."""
    info('Stripping libraries')
    if self.ctx.python_recipe.from_crystax:
        info('Python was loaded from CrystaX, skipping strip')
        return
    env = arch.get_env()
    strip = which('arm-linux-androideabi-strip', env['PATH'])
    if strip is None:
        warning('Can\'t find strip in PATH...')
        return
    strip = sh.Command(strip)
    # python2 dists keep modules under private/; otherwise they live
    # in the _python_bundle modules dir. libs/ is searched either way.
    if self.ctx.python_recipe.name == 'python2':
        search_dirs = [join(self.dist_dir, 'private')]
    else:
        search_dirs = [join(self.dist_dir, '_python_bundle',
                            '_python_bundle', 'modules')]
    search_dirs.append(join(self.dist_dir, 'libs'))
    filens = shprint(sh.find, *search_dirs, '-iname', '*.so',
                     _env=env).stdout.decode('utf-8')
    logger.info('Stripping libraries in private dir')
    for filen in filens.split('\n'):
        try:
            strip(filen, _env=env)
        except sh.ErrorReturnCode_1:
            logger.debug('Failed to strip ' + filen)
def install_python_package(self, arch):
    """Install this package (cpp implementation) into site-packages."""
    env = self.get_recipe_env(arch)
    info('Installing {} into site-packages'.format(self.name))
    with current_directory(join(self.get_build_dir(arch.arch), 'python')):
        hostpython = sh.Command(self.hostpython_location)
        hpenv = env.copy()
        if self.ctx.python_recipe.from_crystax:
            install_lib = '.'
        else:
            # Prepend hostpython's site-packages so setup.py can import
            # its build-time dependencies.
            hppath = join(dirname(self.hostpython_location), 'Lib',
                          'site-packages')
            if 'PYTHONPATH' in hpenv:
                hpenv['PYTHONPATH'] = ':'.join(
                    [hppath] + hpenv['PYTHONPATH'].split(':'))
            else:
                hpenv['PYTHONPATH'] = hppath
            install_lib = 'lib/python2.7/site-packages'
        shprint(hostpython, 'setup.py', 'install', '-O2',
                '--root={}'.format(self.ctx.get_python_install_dir()),
                '--install-lib={}'.format(install_lib),
                '--cpp_implementation',
                _env=hpenv, *self.setup_extra_args)
def build_arch(self, arch):
    """Configure and build libffi, then copy libffi.so into libs.

    Bug fix: ``sh.Command('autoreconf -vif')`` treats the whole string
    as a single program name and fails with CommandNotFound; the flags
    must be passed as separate arguments.
    """
    env = self.get_recipe_env(arch)
    with current_directory(self.get_build_dir(arch.arch)):
        if not exists('configure'):
            shprint(sh.Command('./autogen.sh'), _env=env)
        shprint(sh.Command('autoreconf'), '-vif', _env=env)
        shprint(sh.Command('./configure'),
                '--host=' + arch.toolchain_prefix,
                '--prefix=' + self.ctx.get_python_install_dir(),
                '--enable-shared', _env=env)
        # Build just the libtool archive target.
        shprint(sh.make, '-j5', 'libffi.la', _env=env)
        shprint(sh.cp, '-t', self.ctx.get_libs_dir(arch.arch),
                join(self.get_host(arch), '.libs', 'libffi.so'))
def apply_patches(self, arch, build_dir=None):
    '''Apply any patches for the Recipe.

    .. versionchanged:: 0.6.0
        Add ability to apply patches from any dir via kwarg `build_dir`'''
    if not self.patches:
        return
    info_main('Applying patches for {}[{}]'.format(self.name, arch.arch))
    if self.is_patched(arch):
        info_main('{} already patched, skipping'.format(self.name))
        return
    if build_dir is None:
        build_dir = self.get_build_dir(arch.arch)
    for patch in self.patches:
        # An entry may be a (patch, check) pair; the patch is applied
        # only when the check callable passes.
        if isinstance(patch, (tuple, list)):
            patch, patch_check = patch
            if not patch_check(arch=arch, recipe=self):
                continue
        self.apply_patch(
            patch.format(version=self.version, arch=arch.arch),
            arch.arch, build_dir=build_dir)
    # Marker file prevents re-patching on later runs.
    shprint(sh.touch, join(build_dir, '.patched'))
def apk(self, args):
    '''Create an APK using the given distribution.'''
    ctx = self.ctx
    dist = self._dist

    # --dir/--private paths must be made absolute *before* we change
    # directory, because build.py resolves them against the CWD.
    for index, arg in enumerate(args[:-1]):
        if arg in ('--dir', '--private'):
            args[index + 1] = realpath(expanduser(args[index + 1]))

    build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
    with current_directory(dist.dist_dir):
        build.parse_args(args)
        shprint(sh.ant, 'debug', _tail=20, _critical=True)

    # AND: This is very crude, needs improving. Also only works
    # for debug for now.
    info_main('# Copying APK to current directory')
    apks = glob.glob(join(dist.dist_dir, 'bin', '*-*-debug.apk'))
    if len(apks) == 0:
        raise ValueError('Couldn\'t find the built APK')
    if len(apks) > 1:
        info('More than one built APK found...guessing you '
             'just built {}'.format(apks[-1]))
    shprint(sh.cp, apks[-1], './')
def apply_patch(self, filename, arch, build_dir=None):
    """
    Apply a patch from the current recipe directory
    into the current build directory.

    Generalised for consistency with the newer implementation in this
    file: the optional ``build_dir`` kwarg lets callers patch any
    directory; the default (None) preserves the old behaviour of
    patching this recipe's own build dir, so existing callers are
    unaffected.
    """
    info("Applying patch {}".format(filename))
    filename = join(self.recipe_dir, filename)
    if build_dir is None:
        build_dir = self.get_build_dir(arch)
    # -t: never prompt; -p1: strip the leading path component.
    shprint(sh.patch, "-t", "-d", build_dir, "-p1",
            "-i", filename, _tail=10)
def distribute_libs(self, arch, src_dirs, wildcard='*', dest_dir="libs"):
    '''Copy existing arch libs from build dirs to current dist dir.'''
    info('Copying libs')
    tgt_dir = join(dest_dir, arch.arch)
    ensure_dir(tgt_dir)
    # Flatten: every matching lib from every source dir, in order.
    matches = (lib
               for src_dir in src_dirs
               for lib in glob.glob(join(src_dir, wildcard)))
    for lib in matches:
        shprint(sh.cp, '-a', lib, tgt_dir)
def build_cython_components(self, arch):
    """Build the package's extension modules, cythonizing manually if
    the first setup.py build fails (expected for raw .pyx sources).

    Bug fix: when extending an existing PYTHONPATH the original did
    ``env + ':…'`` (dict + str → TypeError at runtime); it must append
    to ``env['PYTHONPATH']``, as the sibling implementation in this
    file does.
    """
    info('Cythonizing anything necessary in {}'.format(self.name))
    env = self.get_recipe_env(arch)
    if self.ctx.python_recipe.from_crystax:
        # Ask the system python for its site-packages so cython and
        # friends are importable during the build.
        command = sh.Command('python{}'.format(
            self.ctx.python_recipe.version))
        site_packages_dirs = command(
            '-c', 'import site; print("\\n".join(site.getsitepackages()))')
        site_packages_dirs = site_packages_dirs.stdout.decode(
            'utf-8').split('\n')
        if 'PYTHONPATH' in env:
            env['PYTHONPATH'] = env['PYTHONPATH'] + ':{}'.format(
                ':'.join(site_packages_dirs))
        else:
            env['PYTHONPATH'] = ':'.join(site_packages_dirs)
    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
        print('cwd is', realpath(curdir))
        info('Trying first build of {} to get cython files: this is '
             'expected to fail'.format(self.name))
        manually_cythonise = False
        try:
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    *self.setup_extra_args)
        except sh.ErrorReturnCode_1:
            print()
            info('{} first build failed (as expected)'.format(self.name))
            manually_cythonise = True
        if manually_cythonise:
            info('Running cython where appropriate')
            # Cython must not inherit the target python's PYTHONPATH;
            # prefer an explicit CYTHONPATH if one is set.
            cyenv = env.copy()
            if 'CYTHONPATH' in cyenv:
                cyenv['PYTHONPATH'] = cyenv['CYTHONPATH']
            elif 'PYTHONPATH' in cyenv:
                del cyenv['PYTHONPATH']
            cython = ('cython' if self.ctx.python_recipe.from_crystax
                      else self.ctx.cython)
            cython_cmd = 'find "{}" -iname *.pyx | xargs "{}"'.format(
                self.get_build_dir(arch.arch), cython)
            shprint(sh.sh, '-c', cython_cmd, _env=cyenv)
            info('ran cython')
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    _tail=20, _critical=True, *self.setup_extra_args)
        else:
            info('First build appeared to complete correctly, skipping manual'
                 'cythonising.')
        print('stripping')
        build_lib = glob.glob('./build/lib*')
        shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
                env['STRIP'], '{}', ';', _env=env)
        print('stripped!?')
def fry_eggs(self, sitepackages):
    """Flatten installed .egg dirs: move their contents up into
    site-packages and delete the emptied egg directory.

    Bug fix: removal used the bare entry name ``d``, which is resolved
    relative to the *current working directory*, not ``sitepackages`` —
    so the egg dir was not reliably deleted (or a same-named local path
    could be deleted instead). Remove the full path ``rd``.
    """
    info('Frying eggs in {}'.format(sitepackages))
    for d in listdir(sitepackages):
        rd = join(sitepackages, d)
        if isdir(rd) and d.endswith('.egg'):
            info(' ' + d)
            # Everything except the egg metadata moves up a level.
            files = [join(rd, f) for f in listdir(rd) if f != 'EGG-INFO']
            shprint(sh.mv, '-t', sitepackages, *files)
            shprint(sh.rm, '-rf', rd)
def prebuild_arch(self, arch):
    """Drop in our Application.mk and create the jni symlink that
    ndk-build expects."""
    super(JpegRecipe, self).prebuild_arch(arch)
    build_dir = self.get_build_dir(arch.arch)

    application_mk = join(build_dir, 'Application.mk')
    if not exists(application_mk):
        shprint(sh.cp, join(self.get_recipe_dir(), 'Application.mk'),
                application_mk)

    jni_link = join(build_dir, 'jni')
    if not exists(jni_link):
        # Point jni/ back at the build dir itself.
        shprint(sh.ln, '-s', build_dir, jni_link)
def build_arch(self, arch, *extra_args):
    """Run ndk-build for this arch inside the recipe's build dir."""
    super(NDKRecipe, self).build_arch(arch)
    env = self.get_recipe_env(arch)
    ndk_args = [
        'V=1',
        'APP_PLATFORM=android-' + str(self.ctx.ndk_api),
        'APP_ABI=' + arch.arch,
    ]
    ndk_args.extend(extra_args)
    with current_directory(self.get_build_dir(arch.arch)):
        shprint(sh.ndk_build, *ndk_args, _env=env)
def download(self):
    """Fetch this recipe's source archive into the packages cache,
    verifying its md5sum when one is available."""
    if self.url is None:
        info('Skipping {} download as no URL is set'.format(self.name))
        return

    url = self.versioned_url
    # An '#md5=<hex>' URL fragment may carry the expected checksum.
    ma = match(u'^(.+)#md5=([0-9a-f]{32})$', url)
    if ma:                  # fragmented URL?
        if self.md5sum:
            raise ValueError(
                ('Received md5sum from both the {} recipe '
                 'and its url').format(self.name))
        url = ma.group(1)
        expected_md5 = ma.group(2)
    else:
        expected_md5 = self.md5sum

    shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name))
    with current_directory(join(self.ctx.packages_path, self.name)):
        filename = shprint(sh.basename, url).stdout[:-1].decode('utf-8')

        do_download = True
        # The marker file records that a previous download completed.
        marker_filename = '.mark-{}'.format(filename)
        if exists(filename) and isfile(filename):
            if not exists(marker_filename):
                # No marker => the previous download was interrupted;
                # discard the partial file and re-download.
                shprint(sh.rm, filename)
            elif expected_md5:
                current_md5 = md5sum(filename)
                if current_md5 != expected_md5:
                    debug('* Generated md5sum: {}'.format(current_md5))
                    debug('* Expected md5sum: {}'.format(expected_md5))
                    raise ValueError(
                        ('Generated md5sum does not match expected md5sum '
                         'for {} recipe').format(self.name))
                do_download = False
            else:
                do_download = False

        # If we got this far, we will download
        if do_download:
            debug('Downloading {} from {}'.format(self.name, url))
            shprint(sh.rm, '-f', marker_filename)
            self.download_file(self.versioned_url, filename)
            shprint(sh.touch, marker_filename)

            # Verify the freshly downloaded file as well.
            if exists(filename) and isfile(filename) and expected_md5:
                current_md5 = md5sum(filename)
                if expected_md5 is not None:
                    if current_md5 != expected_md5:
                        debug('* Generated md5sum: {}'.format(current_md5))
                        debug('* Expected md5sum: {}'.format(expected_md5))
                        raise ValueError(
                            ('Generated md5sum does not match expected md5sum '
                             'for {} recipe').format(self.name))
        else:
            info('{} download already cached, skipping'.format(self.name))
def prepare_build_dir(self):
    '''Ensure that a build dir exists for the recipe. This same single
    dir will be used for building all different archs.'''
    self.build_dir = self.get_build_dir()
    # Seed the build dir from the bootstrap's `build` template tree.
    shprint(sh.cp, '-r', join(self.bootstrap_dir, 'build'),
            self.build_dir)
    with current_directory(self.build_dir):
        # Record the target Android API level for the ant build.
        with open('project.properties', 'w') as props:
            props.write('target=android-{}'.format(self.ctx.android_api))
def cythonize_file(self, env, build_dir, filename):
    """Run cython over a single file, with CYTHONPATH honoured."""
    display_name = filename
    if filename.startswith(build_dir):
        display_name = filename[len(build_dir) + 1:]
    info(u"Cythonize {}".format(display_name))
    # Give cython CYTHONPATH as its PYTHONPATH when provided; otherwise
    # drop PYTHONPATH entirely (presumably to keep target-python paths
    # away from the host cython — confirm against callers).
    cython_env = env.copy()
    if 'CYTHONPATH' in cython_env:
        cython_env['PYTHONPATH'] = cython_env['CYTHONPATH']
    else:
        cython_env.pop('PYTHONPATH', None)
    if self.ctx.python_recipe.from_crystax:
        cython = 'cython'
    else:
        cython = self.ctx.cython
    shprint(sh.Command(cython), filename, *self.cython_args,
            _env=cython_env)
def install_hostpython_package(self, arch):
    """Install the package into hostpython's Lib/site-packages."""
    env = self.get_hostrecipe_env(arch)
    root = dirname(self.real_hostpython_location)
    setup_args = ['setup.py', 'install', '-O2',
                  '--root={}'.format(root),
                  '--install-lib=Lib/site-packages']
    setup_args.extend(self.setup_extra_args)
    shprint(sh.Command(self.real_hostpython_location),
            *setup_args, _env=env)
def reduce_object_file_names(self, dirn):
    """Recursively renames all files named XXX.cpython-...-linux-gnu.so"
    to "XXX.so", i.e. removing the erroneous architecture name
    coming from the local system.
    """
    found = shprint(sh.find, dirn, '-iname', '*.so')
    # find output ends with a trailing newline; drop the empty tail.
    for path in found.stdout.decode('utf-8').split('\n')[:-1]:
        parent, leaf = split(path)
        pieces = leaf.split('.')
        # Names with a single extension ("XXX.so") are already fine.
        if len(pieces) <= 2:
            continue
        shprint(sh.mv, path, join(parent, pieces[0] + '.so'))
def apply_patch(self, filename, arch, build_dir=None):
    """
    Apply a patch from the current recipe directory
    into the current build directory.

    .. versionchanged:: 0.6.0
        Add ability to apply patch from any dir via kwarg `build_dir`'''
    """
    info("Applying patch {}".format(filename))
    target_dir = build_dir or self.get_build_dir(arch)
    patch_path = join(self.get_recipe_dir(), filename)
    # -t: never prompt; -p1: strip the leading path component.
    shprint(sh.patch, "-t", "-d", target_dir, "-p1",
            "-i", patch_path, _tail=10)
def build_cython_components(self, arch):
    """Cythonize and build extension modules, then strip objects."""
    info('Cythonizing anything necessary in {}'.format(self.name))
    env = self.get_recipe_env(arch)
    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.ctx.hostpython)
        info('Trying first build of {} to get cython files: this is '
             'expected to fail'.format(self.name))
        try:
            shprint(hostpython, 'setup.py', 'build_ext', _env=env,
                    *self.setup_extra_args)
        except sh.ErrorReturnCode_1:
            # Expected: the .pyx sources are not yet cythonized.
            print()
            info('{} first build failed (as expected)'.format(self.name))
        info('Running cython where appropriate')
        shprint(sh.find, self.get_build_dir(arch.arch), '-iname', '*.pyx',
                '-exec', self.ctx.cython, '{}', ';', _env=env)
        info('ran cython')
        shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                _tail=20, _critical=True, *self.setup_extra_args)
        print('stripping')
        build_lib = glob.glob('./build/lib*')
        shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
                env['STRIP'], '{}', ';', _env=env)
        print('stripped!?')
def install_python_package(self, arch):
    """Install this package (cpp implementation) into the target's
    python install dir."""
    env = self.get_recipe_env(arch)
    info('Installing {} into site-packages'.format(self.name))
    with current_directory(join(self.get_build_dir(arch.arch), 'python')):
        hostpython = sh.Command(self.hostpython_location)
        install_env = env.copy()
        shprint(hostpython, 'setup.py', 'install', '-O2',
                '--root={}'.format(self.ctx.get_python_install_dir()),
                '--install-lib=.',
                '--cpp_implementation',
                _env=install_env, *self.setup_extra_args)
def prebuild_arch(self, arch):
    """Apply the openssl patch once, when openssl is in the build order."""
    super(Python2Recipe, self).prebuild_arch(arch)
    patch_mark = join(self.get_build_dir(arch.arch), '.openssl-patched')
    needs_patch = ('openssl' in self.ctx.recipe_build_order
                   and not exists(patch_mark))
    if needs_patch:
        self.apply_patch(join('patches', 'enable-openssl.patch'),
                         arch.arch)
        # The marker prevents re-applying the patch on later runs.
        shprint(sh.touch, patch_mark)
def build_cython_components(self, arch):
    """Run setup.py build_ext, cythonizing manually first if the
    initial build fails (expected for raw .pyx sources), then strip
    debug info from the built objects unless --gdb was requested."""
    info('Cythonizing anything necessary in {}'.format(self.name))
    env = self.get_recipe_env(arch)
    if self.ctx.python_recipe.from_crystax:
        # Ask the system python for its site-packages so cython and
        # friends are importable during the build.
        command = sh.Command('python{}'.format(self.ctx.python_recipe.version))
        if hasattr(sys, 'real_prefix'):
            #: If within a virtualenv
            #: See https://github.com/pypa/virtualenv/issues/355
            site_packages_dirs = command(
                '-c', 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')
        else:
            site_packages_dirs = command(
                '-c', 'import site; print("\\n".join(site.getsitepackages()))')
        site_packages_dirs = site_packages_dirs.stdout.decode(
            'utf-8').split('\n')
        if 'PYTHONPATH' in env:
            env['PYTHONPATH'] += ':{}'.format(':'.join(site_packages_dirs))
        else:
            env['PYTHONPATH'] = ':'.join(site_packages_dirs)
    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
        print('cwd is', realpath(curdir))
        info('Trying first build of {} to get cython files: this is '
             'expected to fail'.format(self.name))
        manually_cythonise = False
        try:
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    *self.setup_extra_args)
        except sh.ErrorReturnCode_1:
            print()
            info('{} first build failed (as expected)'.format(self.name))
            manually_cythonise = True
        if manually_cythonise:
            self.cythonize_build(env=env)
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    _tail=20, _critical=True, *self.setup_extra_args)
        else:
            info('First build appeared to complete correctly, skipping manual'
                 'cythonising.')
        if not '--gdb' in self.cython_args:
            # Then we don't want debug info...
            if 'python2' in self.ctx.recipe_build_order:
                info('Stripping object files')
                build_lib = glob.glob('./build/lib*')
                shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
                        env['STRIP'], '{}', ';', _env=env)
            if self.ctx.python_recipe.from_crystax:
                info('Stripping object files')
                # First find just echoes the matched .so names (log aid).
                shprint(sh.find, '.', '-iname', '*.so', '-exec',
                        '/usr/bin/echo', '{}', ';', _env=env)
                # STRIP may contain flags after the tool path; only the
                # first token is the strip binary itself.
                shprint(sh.find, '.', '-iname', '*.so', '-exec',
                        env['STRIP'].split(' ')[0], '--strip-unneeded',
                        # '/usr/bin/strip', '--strip-unneeded',
                        '{}', ';', _env=env)
def run_pymodules_install(ctx, modules):
    """Install requirement modules that have no recipe using pip inside
    a throwaway virtualenv, targeting the dist's site-packages."""
    modules = list(filter(ctx.not_has_package, modules))
    if not modules:
        info('There are no Python modules to install, skipping')
        return
    info('The requirements ({}) don\'t have recipes, attempting to install '
         'them with pip'.format(', '.join(modules)))
    info('If this fails, it may mean that the module has compiled '
         'components and needs a recipe.')
    venv = sh.Command(ctx.virtualenv)
    with current_directory(join(ctx.build_dir)):
        # Create a venv matching the target python's major.minor version.
        shprint(
            venv,
            '--python=python{}.{}'.format(
                ctx.python_recipe.major_minor_version_string.partition(".")[0],
                ctx.python_recipe.major_minor_version_string.partition(".")
                [2]),
            'venv')

        info('Creating a requirements.txt file for the Python modules')
        with open('requirements.txt', 'w') as fileh:
            for module in modules:
                # A VERSION_<module> env var pins that module's version.
                key = 'VERSION_' + module
                if key in environ:
                    line = '{}=={}\n'.format(module, environ[key])
                else:
                    line = '{}\n'.format(module)
                fileh.write(line)

        # Prepare base environment and upgrade pip:
        base_env = copy.copy(os.environ)
        base_env["PYTHONPATH"] = ctx.get_site_packages_dir()
        info('Upgrade pip to latest version')
        shprint(sh.bash, '-c', (
            "source venv/bin/activate && pip install -U pip"),
            _env=copy.copy(base_env))

        # Install Cython in case modules need it to build:
        info('Install Cython in case one of the modules needs it to build')
        shprint(sh.bash, '-c', ("venv/bin/pip install Cython"),
                _env=copy.copy(base_env))

        # Get environment variables for build (with CC/compiler set):
        standard_recipe = CythonRecipe()
        standard_recipe.ctx = ctx
        # (note: following line enables explicit -lpython... linker options)
        standard_recipe.call_hostpython_via_targetpython = False
        recipe_env = standard_recipe.get_recipe_env(ctx.archs[0])
        env = copy.copy(base_env)
        env.update(recipe_env)

        info('Installing Python modules with pip')
        info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. '
             'A reason for this is often modules compiling '
             'native code that is unaware of Android cross-compilation '
             'and does not work without additional '
             'changes / workarounds.')

        # Make sure our build package dir is available, and the virtualenv
        # site packages come FIRST (so the proper pip version is used):
        env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir()
        env["PYTHONPATH"] = os.path.abspath(join(
            ctx.build_dir, "venv", "lib",
            "python" + ctx.python_recipe.major_minor_version_string,
            "site-packages")) + ":" + env["PYTHONPATH"]

        '''
        # Do actual install:
        shprint(sh.bash, '-c', (
            "venv/bin/pip " +
            "install -v --target '{0}' --no-deps -r requirements.txt"
        ).format(ctx.get_site_packages_dir().replace("'", "'\"'\"'")),
            _env=copy.copy(env))
        '''

        # use old install script
        shprint(sh.bash, '-c', (
            "source venv/bin/activate && env CC=/bin/false CXX=/bin/false "
            "PYTHONPATH={0} pip install --target '{0}' --no-deps -r requirements.txt"
        ).format(ctx.get_site_packages_dir()))

        # Strip object files after potential Cython or native code builds:
        standard_recipe.strip_object_files(ctx.archs[0], env,
                                           build_dir=ctx.build_dir)
def download(self):
    """Fetch this recipe's source archive into the packages cache,
    verifying every configured hash digest."""
    if self.url is None:
        info('Skipping {} download as no URL is set'.format(self.name))
        return

    url = self.versioned_url
    expected_digests = {}
    # Collect expected digests both from recipe attributes (e.g.
    # self.sha512sum) and from an '#alg=<hex>' URL fragment, for every
    # algorithm hashlib guarantees plus a few explicit extras.
    for alg in set(hashlib.algorithms_guaranteed) | set(
            ('md5', 'sha512', 'blake2b')):
        expected_digest = getattr(self, alg + 'sum') if hasattr(self, alg + 'sum') else None
        ma = match(u'^(.+)#' + alg + u'=([0-9a-f]{32,})$', url)
        if ma:                  # fragmented URL?
            if expected_digest:
                raise ValueError(
                    ('Received {}sum from both the {} recipe '
                     'and its url').format(alg, self.name))
            url = ma.group(1)
            expected_digest = ma.group(2)
        if expected_digest:
            expected_digests[alg] = expected_digest

    shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name))
    with current_directory(join(self.ctx.packages_path, self.name)):
        filename = shprint(sh.basename, url).stdout[:-1].decode('utf-8')

        do_download = True
        # The marker file records that a previous download completed.
        marker_filename = '.mark-{}'.format(filename)
        if exists(filename) and isfile(filename):
            if not exists(marker_filename):
                # No marker => the previous download was interrupted;
                # discard the partial file and re-download.
                shprint(sh.rm, filename)
            else:
                for alg, expected_digest in expected_digests.items():
                    current_digest = algsum(alg, filename)
                    if current_digest != expected_digest:
                        debug('* Generated {}sum: {}'.format(
                            alg, current_digest))
                        debug('* Expected {}sum: {}'.format(
                            alg, expected_digest))
                        raise ValueError(
                            ('Generated {0}sum does not match expected {0}sum '
                             'for {1} recipe').format(alg, self.name))
                do_download = False

        # If we got this far, we will download
        if do_download:
            debug('Downloading {} from {}'.format(self.name, url))
            shprint(sh.rm, '-f', marker_filename)
            self.download_file(self.versioned_url, filename)
            shprint(sh.touch, marker_filename)

            # Verify the freshly downloaded file as well.
            if exists(filename) and isfile(filename):
                for alg, expected_digest in expected_digests.items():
                    current_digest = algsum(alg, filename)
                    if current_digest != expected_digest:
                        debug('* Generated {}sum: {}'.format(
                            alg, current_digest))
                        debug('* Expected {}sum: {}'.format(
                            alg, expected_digest))
                        raise ValueError(
                            ('Generated {0}sum does not match expected {0}sum '
                             'for {1} recipe').format(alg, self.name))
        else:
            info('{} download already cached, skipping'.format(self.name))
def run_pymodules_install(ctx, modules, project_dir=None,
                          ignore_setup_py=False):
    """ This function will take care of all non-recipe things, by:

        1. Processing them from --requirements (the modules argument)
           and installing them

        2. Installing the user project/app itself via setup.py when one
           is present and ignore_setup_py is False
    """

    info('*** PYTHON PACKAGE / PROJECT INSTALL STAGE ***')
    modules = list(filter(ctx.not_has_package, modules))

    # We change current working directory later, so this has to be an absolute
    # path or `None` in case that we didn't supply the `project_dir` via kwargs
    project_dir = abspath(project_dir) if project_dir else None

    # Bail out if no python deps and no setup.py to process:
    if not modules and (
            ignore_setup_py or
            project_dir is None or
            not project_has_setup_py(project_dir)):
        info('No Python modules and no setup.py to process, skipping')
        return

    # Output messages about what we're going to do:
    if modules:
        info('The requirements ({}) don\'t have recipes, attempting to '
             'install them with pip'.format(', '.join(modules)))
        info('If this fails, it may mean that the module has compiled '
             'components and needs a recipe.')
    if project_dir is not None and \
            project_has_setup_py(project_dir) and not ignore_setup_py:
        info('Will process project install, if it fails then the '
             'project may not be compatible for Android install.')

    venv = sh.Command(ctx.virtualenv)
    with current_directory(join(ctx.build_dir)):
        # Create a venv matching the target python's major version.
        shprint(venv,
                '--python=python{}'.format(
                    ctx.python_recipe.major_minor_version_string.
                    partition(".")[0]),
                'venv')

        # Prepare base environment and upgrade pip:
        base_env = copy.copy(os.environ)
        base_env["PYTHONPATH"] = ctx.get_site_packages_dir()
        info('Upgrade pip to latest version')
        shprint(sh.bash, '-c', (
            "source venv/bin/activate && pip install -U pip"),
            _env=copy.copy(base_env))

        # Install Cython in case modules need it to build:
        info('Install Cython in case one of the modules needs it to build')
        shprint(sh.bash, '-c', ("venv/bin/pip install Cython"),
                _env=copy.copy(base_env))

        # Get environment variables for build (with CC/compiler set):
        standard_recipe = CythonRecipe()
        standard_recipe.ctx = ctx
        # (note: following line enables explicit -lpython... linker options)
        standard_recipe.call_hostpython_via_targetpython = False
        recipe_env = standard_recipe.get_recipe_env(ctx.archs[0])
        env = copy.copy(base_env)
        env.update(recipe_env)

        # Make sure our build package dir is available, and the virtualenv
        # site packages come FIRST (so the proper pip version is used):
        env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir()
        env["PYTHONPATH"] = os.path.abspath(join(
            ctx.build_dir, "venv", "lib",
            "python" + ctx.python_recipe.major_minor_version_string,
            "site-packages")) + ":" + env["PYTHONPATH"]

        # Install the manually specified requirements first:
        if not modules:
            info('There are no Python modules to install, skipping')
        else:
            info('Creating a requirements.txt file for the Python modules')
            with open('requirements.txt', 'w') as fileh:
                for module in modules:
                    # A VERSION_<module> env var pins that module's version.
                    key = 'VERSION_' + module
                    if key in environ:
                        line = '{}=={}\n'.format(module, environ[key])
                    else:
                        line = '{}\n'.format(module)
                    fileh.write(line)

            info('Installing Python modules with pip')
            info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. '
                 'A reason for this is often modules compiling '
                 'native code that is unaware of Android cross-compilation '
                 'and does not work without additional '
                 'changes / workarounds.')

            shprint(sh.bash, '-c', (
                "venv/bin/pip " +
                "install -v --target '{0}' --no-deps -r requirements.txt"
            ).format(ctx.get_site_packages_dir().replace(
                "'", "'\"'\"'")), _env=copy.copy(env))

        # Afterwards, run setup.py if present:
        if project_dir is not None and (
                project_has_setup_py(project_dir) and not ignore_setup_py):
            run_setuppy_install(ctx, project_dir, env)
        elif not ignore_setup_py:
            info("No setup.py found in project directory: " +
                 str(project_dir))

        # Strip object files after potential Cython or native code builds:
        standard_recipe.strip_object_files(ctx.archs[0], env,
                                           build_dir=ctx.build_dir)
def unpack(self, arch):
    """Unpack this recipe's downloaded source into its per-arch build dir.

    Three paths through this method:
      * If a ``P4A_<name>_DIR`` environment variable is set, that local
        directory is copied in instead of any downloaded archive.
      * If the recipe has no URL, unpacking is skipped entirely.
      * Otherwise the cached download (zip/tar) is extracted into the
        build container dir, renaming the archive's root directory to the
        expected build dir name if they differ.
    """
    info_main('Unpacking {} for {}'.format(self.name, arch))

    build_dir = self.get_build_container_dir(arch)

    # A user-supplied source dir overrides the downloaded archive.
    user_dir = environ.get('P4A_{}_DIR'.format(self.name.lower()))
    if user_dir is not None:
        info('P4A_{}_DIR exists, symlinking instead'.format(
            self.name.lower()))
        if exists(self.get_build_dir(arch)):
            # Already copied on a previous run; don't clobber it.
            return
        # Recreate an empty build_dir, then copy the user dir in.
        # (mkdir -p then rmdir ensures the *parent* dirs exist while
        # build_dir itself is left absent for ensure_dir/cp below.)
        shprint(sh.rm, '-rf', build_dir)
        shprint(sh.mkdir, '-p', build_dir)
        shprint(sh.rmdir, build_dir)
        ensure_dir(build_dir)
        shprint(sh.cp, '-a', user_dir, self.get_build_dir(arch))
        return

    if self.url is None:
        info('Skipping {} unpack as no URL is set'.format(self.name))
        return

    # Derive the cached download's filename from the versioned URL;
    # stdout[:-1] strips basename's trailing newline.
    filename = shprint(
        sh.basename, self.versioned_url).stdout[:-1].decode('utf-8')
    ma = match(u'^(.+)#md5=([0-9a-f]{32})$', filename)
    if ma:                  # fragmented URL?
        # Drop a trailing "#md5=..." fragment from the filename.
        filename = ma.group(1)

    with current_directory(build_dir):
        directory_name = self.get_build_dir(arch)

        if not exists(directory_name) or not isdir(directory_name):
            extraction_filename = join(
                self.ctx.packages_path, self.name, filename)
            if isfile(extraction_filename):
                if extraction_filename.endswith('.zip'):
                    try:
                        sh.unzip(extraction_filename)
                    except (sh.ErrorReturnCode_1, sh.ErrorReturnCode_2):
                        # return code 1 means unzipping had
                        # warnings but did complete,
                        # apparently happens sometimes with
                        # github zips
                        pass
                    # Use the archive listing to find the extracted
                    # root directory so it can be renamed.
                    import zipfile
                    fileh = zipfile.ZipFile(extraction_filename, 'r')
                    root_directory = fileh.filelist[0].filename.split(
                        '/')[0]
                    if root_directory != basename(directory_name):
                        shprint(sh.mv, root_directory, directory_name)
                elif extraction_filename.endswith(
                        ('.tar.gz', '.tgz', '.tar.bz2', '.tbz2',
                         '.tar.xz', '.txz')):
                    sh.tar('xf', extraction_filename)
                    # First path component of the listing == root dir.
                    root_directory = sh.tar(
                        'tf', extraction_filename).stdout.decode(
                            'utf-8').split('\n')[0].split('/')[0]
                    if root_directory != basename(directory_name):
                        shprint(sh.mv, root_directory, directory_name)
                else:
                    raise Exception(
                        'Could not extract {} download, it must be .zip, '
                        '.tar.gz or .tar.bz2 or .tar.xz'.format(
                            extraction_filename))
            elif isdir(extraction_filename):
                # A directory "download": copy its contents (minus .git).
                mkdir(directory_name)
                for entry in listdir(extraction_filename):
                    if entry not in ('.git', ):
                        shprint(sh.cp, '-Rv',
                                join(extraction_filename, entry),
                                directory_name)
            else:
                raise Exception(
                    'Given path is neither a file nor a directory: {}'.
                    format(extraction_filename))
        else:
            info('{} is already unpacked, skipping'.format(self.name))
def prebuild_arch(self, arch):
    """Install this recipe's bundled ``Setup`` file into the hostpython
    source tree before the build runs."""
    # Override hostpython Setup?
    # NOTE(review): get_build_dir() is called without an arch argument
    # here, unlike sibling recipes — confirm the override takes no arg.
    setup_source = join(self.get_recipe_dir(), 'Setup')
    setup_target = join(self.get_build_dir(), 'Modules', 'Setup')
    shprint(sh.cp, setup_source, setup_target)
def build_arch(self, arch):
    """Configure and build OpenCV's Python bindings for ``arch``.

    Runs cmake in an out-of-tree ``build`` subdirectory with flags that:
    force shared libraries, disable Java bindings/tests/examples, build
    only the Python major version in use, and install ``cv2.so`` straight
    into the distribution's site-packages. Finally copies the third-party
    ``.so`` dependencies into the libs dir for packaging.
    """
    build_dir = join(self.get_build_dir(arch.arch), 'build')
    shprint(sh.mkdir, '-p', build_dir)

    # Optionally pull in the opencv_extras (contrib) modules if that
    # recipe is part of this build.
    opencv_extras = []
    if 'opencv_extras' in self.ctx.recipe_build_order:
        opencv_extras_dir = self.get_recipe(
            'opencv_extras', self.ctx).get_build_dir(arch.arch)
        opencv_extras = [
            f'-DOPENCV_EXTRA_MODULES_PATH={opencv_extras_dir}/modules',
            '-DBUILD_opencv_legacy=OFF',
        ]

    with current_directory(build_dir):
        env = self.get_recipe_env(arch)

        # Collect target-python locations needed by the cmake flags.
        python_major = self.ctx.python_recipe.version[0]
        python_include_root = self.ctx.python_recipe.include_root(
            arch.arch)
        python_site_packages = self.ctx.get_site_packages_dir()
        python_link_root = self.ctx.python_recipe.link_root(arch.arch)
        python_link_version = \
            self.ctx.python_recipe.major_minor_version_string
        if 'python3' in self.ctx.python_recipe.name:
            # CPython 3 (pre-3.8 ABI) library names carry an 'm' suffix.
            python_link_version += 'm'
        python_library = join(
            python_link_root,
            'libpython{}.so'.format(python_link_version))
        python_include_numpy = join(python_site_packages,
                                    'numpy', 'core', 'include')

        shprint(
            sh.cmake,
            '-DP4A=ON',
            '-DANDROID_ABI={}'.format(arch.arch),
            '-DANDROID_STANDALONE_TOOLCHAIN={}'.format(self.ctx.ndk_dir),
            '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api),
            '-DANDROID_EXECUTABLE={}/tools/android'.format(
                env['ANDROID_SDK']),
            '-DCMAKE_TOOLCHAIN_FILE={}'.format(
                join(self.ctx.ndk_dir, 'build', 'cmake',
                     'android.toolchain.cmake')),
            # Make the linkage with our python library, otherwise we
            # will get dlopen error when trying to import cv2's module.
            '-DCMAKE_SHARED_LINKER_FLAGS=-L{path} -lpython{version}'.
            format(path=python_link_root, version=python_link_version),

            '-DBUILD_WITH_STANDALONE_TOOLCHAIN=ON',
            # Force to build as shared libraries the cv2's dependant
            # libs or we will not be able to link with our python
            '-DBUILD_SHARED_LIBS=ON',
            '-DBUILD_STATIC_LIBS=OFF',

            # Disable some opencv's features
            '-DBUILD_opencv_java=OFF',
            '-DBUILD_opencv_java_bindings_generator=OFF',
            # '-DBUILD_opencv_highgui=OFF',
            # '-DBUILD_opencv_imgproc=OFF',
            # '-DBUILD_opencv_flann=OFF',
            '-DBUILD_TESTS=OFF',
            '-DBUILD_PERF_TESTS=OFF',
            '-DENABLE_TESTING=OFF',
            '-DBUILD_EXAMPLES=OFF',
            '-DBUILD_ANDROID_EXAMPLES=OFF',

            # Force to only build our version of python
            '-DBUILD_OPENCV_PYTHON{major}=ON'.format(major=python_major),
            '-DBUILD_OPENCV_PYTHON{major}=OFF'.format(
                major='2' if python_major == '3' else '3'),

            # Force to install the `cv2.so` library directly into
            # python's site packages (otherwise the cv2's loader fails
            # on finding the cv2.so library)
            '-DOPENCV_SKIP_PYTHON_LOADER=ON',
            '-DOPENCV_PYTHON{major}_INSTALL_PATH={site_packages}'.format(
                major=python_major, site_packages=python_site_packages),

            # Define python's paths for: exe, lib, includes, numpy...
            '-DPYTHON_DEFAULT_EXECUTABLE={}'.format(self.ctx.hostpython),
            '-DPYTHON{major}_EXECUTABLE={host_python}'.format(
                major=python_major, host_python=self.ctx.hostpython),
            '-DPYTHON{major}_INCLUDE_PATH={include_path}'.format(
                major=python_major, include_path=python_include_root),
            '-DPYTHON{major}_LIBRARIES={python_lib}'.format(
                major=python_major, python_lib=python_library),
            '-DPYTHON{major}_NUMPY_INCLUDE_DIRS={numpy_include}'.format(
                major=python_major, numpy_include=python_include_numpy),
            '-DPYTHON{major}_PACKAGES_PATH={site_packages}'.format(
                major=python_major, site_packages=python_site_packages),

            *opencv_extras,

            self.get_build_dir(arch.arch),
            _env=env)
        # Only build the python bindings target, in parallel.
        shprint(sh.make, '-j' + str(cpu_count()),
                'opencv_python' + python_major)
        # Install python bindings (cv2.so)
        shprint(sh.cmake, '-DCOMPONENT=python', '-P',
                './cmake_install.cmake')
        # Copy third party shared libs that we need in our final apk
        sh.cp('-a', sh.glob('./lib/{}/lib*.so'.format(arch.arch)),
              self.ctx.get_libs_dir(arch.arch))
def download_file(self, url, target, cwd=None):
    """
    (internal) Download an ``url`` to a ``target``.

    HTTP(S) URLs are fetched with ``urlretrieve`` (up to 5 attempts,
    with a progress indicator unless running under CI). ``git``-scheme
    URLs are cloned, or fetched/pulled if ``target`` already exists.
    Returns the target path, or None if ``url`` is falsy.
    """
    if not url:
        return

    info('Downloading {} from {}'.format(self.name, url))

    if cwd:
        target = join(cwd, target)

    parsed_url = urlparse(url)
    if parsed_url.scheme in ('http', 'https'):
        def report_hook(index, blksize, size):
            # urlretrieve progress callback: size <= 0 means the server
            # sent no Content-Length, so only a byte count is shown.
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                    index * blksize * 100. / float(size))
            # Suppress the carriage-return progress line on CI servers.
            if "CI" not in environ:
                stdout.write('- Download {}\r'.format(progression))
                stdout.flush()

        if exists(target):
            unlink(target)

        # Download item with multiple attempts (for bad connections):
        attempts = 0
        while True:
            try:
                urlretrieve(url, target, report_hook)
            except OSError:
                attempts += 1
                if attempts >= 5:
                    # Give up and propagate the last failure.
                    raise
                stdout.write('Download failed retrying in a second...')
                time.sleep(1)
                continue
            break
        return target
    elif parsed_url.scheme in ('git', 'git+file', 'git+ssh',
                               'git+http', 'git+https'):
        if isdir(target):
            # Existing clone: update it in place.
            with current_directory(target):
                shprint(sh.git, 'fetch', '--tags')
                if self.version:
                    shprint(sh.git, 'checkout', self.version)
                shprint(sh.git, 'pull')
                shprint(sh.git, 'pull', '--recurse-submodules')
                shprint(sh.git, 'submodule', 'update', '--recursive')
        else:
            # Strip the 'git+' transport prefix before cloning.
            if url.startswith('git+'):
                url = url[4:]
            shprint(sh.git, 'clone', '--recursive', url, target)
            if self.version:
                with current_directory(target):
                    shprint(sh.git, 'checkout', self.version)
                    shprint(sh.git, 'submodule', 'update', '--recursive')
        return target
def copylibs_function(soname, objs_paths, extra_link_dirs=None, env=None):
    """Resolve and copy the shared libraries a build needs next to it.

    Scans each directory in ``objs_paths`` for ``*.libs`` files (one
    space-separated list of required lib names each, with a sibling
    ``*.libdirs`` file listing directories to search), resolves every
    non-blacklisted library to a ``.so`` (or notes static ``.a`` hits),
    follows transitive NEEDED entries via ``readelf -d``, and finally
    copies all found ``.so`` files into the directory of ``soname``.

    :param soname: path of the output object; its dirname is the copy
        destination.
    :param objs_paths: iterable of directories containing ``*.libs`` /
        ``*.libdirs`` manifests.
    :param extra_link_dirs: optional extra directories (currently unused
        by this body).
    :param env: optional mapping consulted for a ``READELF`` override.
    :raises RuntimeError: if a pass over all libdirs makes no progress.
    """
    # Fix: the original signature used a mutable default (``[]``), which
    # is shared between calls; normalize to a fresh list per call.
    if extra_link_dirs is None:
        extra_link_dirs = []
    print('objs_paths are', objs_paths)
    # Matches "(NEEDED) Shared library: [libfoo.so]" lines from readelf.
    re_needso = re.compile(
        r'^.*\(NEEDED\)\s+Shared library: \[lib(.*)\.so\]\s*$')
    blacklist_libs = (
        'c', 'stdc++', 'dl', 'python2.7', 'sdl', 'sdl_image', 'sdl_ttf',
        'z', 'm', 'GLESv2', 'jpeg', 'png', 'log',

        # bootstrap takes care of sdl2 libs (if applicable)
        'SDL2', 'SDL2_ttf', 'SDL2_image', 'SDL2_mixer',
    )
    found_libs = []
    sofiles = []
    # Pick the readelf binary: explicit env, process env, then PATH.
    if env and 'READELF' in env:
        readelf = env['READELF']
    elif 'READELF' in os.environ:
        readelf = os.environ['READELF']
    else:
        readelf = sh.which('readelf').strip()
    readelf = sh.Command(readelf).bake('-d')

    dest = dirname(soname)

    for directory in objs_paths:
        for fn in os.listdir(directory):
            fn = join(directory, fn)
            if not fn.endswith('.libs'):
                continue
            # "foo.libs" pairs with "foo.libdirs".
            dirfn = fn[:-1] + 'dirs'
            if not exists(dirfn):
                continue
            with open(fn) as f:
                libs = f.read().strip().split(' ')
                needed_libs = [lib for lib in libs
                               if lib and
                               lib not in blacklist_libs and
                               lib not in found_libs]

            # Iterate until the worklist is empty; transitive NEEDED
            # entries discovered below are appended as we go.
            while needed_libs:
                print('need libs:\n\t' + '\n\t'.join(needed_libs))

                start_needed_libs = needed_libs[:]
                found_sofiles = []

                with open(dirfn) as f:
                    libdirs = f.read().split()
                    for libdir in libdirs:
                        if not needed_libs:
                            break

                        if libdir == dest:
                            # don't need to copy from dest to dest!
                            continue

                        libdir = libdir.strip()
                        print('scanning', libdir)
                        # Iterate over a copy: we remove while looping.
                        for lib in needed_libs[:]:
                            if lib in found_libs:
                                continue

                            if lib.endswith('.a'):
                                # Static libs need no runtime copy.
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                            lib_a = 'lib' + lib + '.a'
                            libpath_a = join(libdir, lib_a)
                            lib_so = 'lib' + lib + '.so'
                            libpath_so = join(libdir, lib_so)
                            plain_so = lib + '.so'
                            plainpath_so = join(libdir, plain_so)

                            # Prefer "libfoo.so", fall back to "foo.so".
                            sopath = None
                            if exists(libpath_so):
                                sopath = libpath_so
                            elif exists(plainpath_so):
                                sopath = plainpath_so

                            if sopath:
                                print('found', lib, 'in', libdir)
                                found_sofiles.append(sopath)
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                            if exists(libpath_a):
                                print('found', lib, '(static) in', libdir)
                                needed_libs.remove(lib)
                                found_libs.append(lib)
                                continue

                # Chase transitive dependencies of everything we found.
                for sofile in found_sofiles:
                    print('scanning dependencies for', sofile)
                    out = readelf(sofile)
                    for line in out.splitlines():
                        needso = re_needso.match(line)
                        if needso:
                            lib = needso.group(1)
                            if (lib not in needed_libs
                                    and lib not in found_libs
                                    and lib not in blacklist_libs):
                                needed_libs.append(needso.group(1))

                sofiles += found_sofiles

                if needed_libs == start_needed_libs:
                    # No progress this pass: the remaining libs are
                    # unresolvable, bail out loudly.
                    raise RuntimeError(
                        'Failed to locate needed libraries!\n\t' +
                        '\n\t'.join(needed_libs))

    print('Copying libraries')
    for lib in sofiles:
        shprint(sh.cp, lib, dest)
def apk(self, args):
    """Create an APK using the given distribution.

    Normalizes path-valued passthrough arguments, exports signing
    configuration to the environment for release builds, runs the
    dist's ``build.py`` argument parsing, then assembles the APK with
    gradle (preferred when build-tools >= 25.0 and a gradlew wrapper
    exists) or ant, and finally copies the resulting APK into the
    current directory (renaming to include the version for gradle
    builds).
    """
    ctx = self.ctx
    dist = self._dist

    # Manually fixing these arguments at the string stage is
    # unsatisfactory and should probably be changed somehow, but
    # we can't leave it until later as the build.py scripts assume
    # they are in the current directory.
    fix_args = ('--dir', '--private', '--add-jar', '--add-source',
                '--whitelist', '--blacklist', '--presplash', '--icon')
    unknown_args = args.unknown_args
    for i, arg in enumerate(unknown_args):
        argx = arg.split('=')
        if argx[0] in fix_args:
            if len(argx) > 1:
                # "--flag=path" form: rewrite the path in place.
                unknown_args[i] = '='.join(
                    (argx[0], realpath(expanduser(argx[1]))))
            else:
                # "--flag path" form: rewrite the following argument.
                unknown_args[i+1] = realpath(expanduser(unknown_args[i+1]))

    # Release signing config is communicated to build.py via env vars.
    env = os.environ.copy()
    if args.build_mode == 'release':
        if args.keystore:
            env['P4A_RELEASE_KEYSTORE'] = realpath(
                expanduser(args.keystore))
        if args.signkey:
            env['P4A_RELEASE_KEYALIAS'] = args.signkey
        if args.keystorepw:
            env['P4A_RELEASE_KEYSTORE_PASSWD'] = args.keystorepw
        if args.signkeypw:
            env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.signkeypw
        elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env:
            # Fall back to the keystore password for the key alias.
            env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw

    # Load the dist's build.py as a module to reuse its arg parser.
    build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
    with current_directory(dist.dist_dir):
        self.hook("before_apk_build")
        os.environ["ANDROID_API"] = str(self.ctx.android_api)
        build_args = build.parse_args(args.unknown_args)
        self.hook("after_apk_build")
        self.hook("before_apk_assemble")

        build_type = ctx.java_build_tool
        if build_type == 'auto':
            info('Selecting java build tool:')

            build_tools_versions = os.listdir(
                join(ctx.sdk_dir, 'build-tools'))
            build_tools_versions = sorted(build_tools_versions,
                                          key=LooseVersion)
            build_tools_version = build_tools_versions[-1]
            info(('Detected highest available build tools '
                  'version to be {}').format(build_tools_version))

            if build_tools_version >= '25.0' and exists('gradlew'):
                build_type = 'gradle'
                info(' Building with gradle, as gradle executable is '
                     'present')
            else:
                build_type = 'ant'
                if build_tools_version < '25.0':
                    info((' Building with ant, as the highest '
                          'build-tools-version is only {}').format(
                              build_tools_version))
                else:
                    info(' Building with ant, as no gradle executable '
                         'detected')

        if build_type == 'gradle':
            # gradle-based build
            env["ANDROID_NDK_HOME"] = self.ctx.ndk_dir
            env["ANDROID_HOME"] = self.ctx.sdk_dir

            gradlew = sh.Command('./gradlew')
            if exists('/usr/bin/dos2unix'):
                # .../dists/bdisttest_python3/gradlew
                # .../build/bootstrap_builds/sdl2-python3crystax/gradlew
                # if docker on windows, gradle contains CRLF
                output = shprint(
                    sh.Command('dos2unix'), gradlew._path.decode('utf8'),
                    _tail=20, _critical=True, _env=env
                )
            if args.build_mode == "debug":
                gradle_task = "assembleDebug"
            elif args.build_mode == "release":
                gradle_task = "assembleRelease"
            else:
                error("Unknown build mode {} for apk()".format(
                    args.build_mode))
                exit(1)
            output = shprint(gradlew, gradle_task, _tail=20,
                             _critical=True, _env=env)

            # gradle output apks somewhere else
            # and don't have version in file
            apk_dir = join(dist.dist_dir,
                           "build", "outputs", "apk",
                           args.build_mode)
            apk_glob = "*-{}.apk"
            apk_add_version = True

        else:
            # ant-based build
            try:
                ant = sh.Command('ant')
            except sh.CommandNotFound:
                error('Could not find ant binary, please install it '
                      'and make sure it is in your $PATH.')
                exit(1)
            output = shprint(ant, args.build_mode, _tail=20,
                             _critical=True, _env=env)
            apk_dir = join(dist.dist_dir, "bin")
            apk_glob = "*-*-{}.apk"
            apk_add_version = False

        self.hook("after_apk_assemble")

        info_main('# Copying APK to current directory')

        # Prefer the APK path printed by the build tool itself.
        apk_re = re.compile(r'.*Package: (.*\.apk)$')
        apk_file = None
        for line in reversed(output.splitlines()):
            m = apk_re.match(line)
            if m:
                apk_file = m.groups()[0]
                break

        if not apk_file:
            # Fall back to globbing the expected output directory.
            info_main('# APK filename not found in build output. '
                      'Guessing...')
            if args.build_mode == "release":
                suffixes = ("release", "release-unsigned")
            else:
                suffixes = ("debug", )
            for suffix in suffixes:
                apks = glob.glob(join(apk_dir, apk_glob.format(suffix)))
                if apks:
                    if len(apks) > 1:
                        info('More than one built APK found... guessing you '
                             'just built {}'.format(apks[-1]))
                    apk_file = apks[-1]
                    break
            else:
                raise ValueError('Couldn\'t find the built APK')

        info_main('# Found APK file: {}'.format(apk_file))
        if apk_add_version:
            info('# Add version number to APK')
            apk_name, apk_suffix = basename(apk_file).split("-", 1)
            apk_file_dest = "{}-{}-{}".format(
                apk_name, build_args.version, apk_suffix)
            info('# APK renamed to {}'.format(apk_file_dest))
            shprint(sh.cp, apk_file, apk_file_dest)
        else:
            shprint(sh.cp, apk_file, './')
def create_python_bundle(self, dirn, arch):
    """
    Create a packaged python bundle in the target directory, by
    copying all the modules and standard library to the right
    place.

    Byte-compiles the built extension modules, the stdlib and the
    site-packages; copies compiled extension modules into
    ``dirn/modules``; zips the stdlib into ``dirn/stdlib.zip``;
    copies site-packages (minus blacklisted files) into
    ``dirn/site-packages``; and places libpython next to the other
    dist libs. Returns the site-packages path inside the bundle.
    """
    # Todo: find a better way to find the build libs folder
    modules_build_dir = join(
        self.get_build_dir(arch.arch),
        'android-build',
        'build',
        # e.g. "lib.linux-arm-3.7" (Python 3) / "lib.linux2-..." (Py 2).
        'lib.linux{}-{}-{}'.format(
            '2' if self.version[0] == '2' else '',
            arch.command_prefix.split('-')[0],
            self.major_minor_version_string
        ))

    # Compile to *.pyc/*.pyo the python modules
    self.compile_python_files(modules_build_dir)
    # Compile to *.pyc/*.pyo the standard python library
    self.compile_python_files(join(self.get_build_dir(arch.arch), 'Lib'))
    # Compile to *.pyc/*.pyo the other python packages (site-packages)
    self.compile_python_files(self.ctx.get_python_install_dir())

    # Bundle compiled python modules to a folder
    modules_dir = join(dirn, 'modules')
    c_ext = self.compiled_extension
    ensure_dir(modules_dir)
    module_filens = (glob.glob(join(modules_build_dir, '*.so')) +
                     glob.glob(join(modules_build_dir, '*' + c_ext)))
    info("Copy {} files into the bundle".format(len(module_filens)))
    for filen in module_filens:
        info(" - copy {}".format(filen))
        copy2(filen, modules_dir)

    # zip up the standard library
    stdlib_zip = join(dirn, 'stdlib.zip')
    with current_directory(join(self.get_build_dir(arch.arch), 'Lib')):
        stdlib_filens = list(walk_valid_filens(
            '.', self.stdlib_dir_blacklist, self.stdlib_filen_blacklist))
        info("Zip {} files into the bundle".format(len(stdlib_filens)))
        shprint(sh.zip, stdlib_zip, *stdlib_filens)

    # copy the site-packages into place
    ensure_dir(join(dirn, 'site-packages'))
    ensure_dir(self.ctx.get_python_install_dir())
    # TODO: Improve the API around walking and copying the files
    with current_directory(self.ctx.get_python_install_dir()):
        filens = list(walk_valid_filens(
            '.', self.site_packages_dir_blacklist,
            self.site_packages_filen_blacklist))
        info("Copy {} files into the site-packages".format(len(filens)))
        for filen in filens:
            info(" - copy {}".format(filen))
            # Recreate the relative directory layout inside the bundle.
            ensure_dir(join(dirn, 'site-packages', dirname(filen)))
            copy2(filen, join(dirn, 'site-packages', filen))

    # copy the python .so files into place
    python_build_dir = join(self.get_build_dir(arch.arch),
                            'android-build')
    python_lib_name = 'libpython' + self.major_minor_version_string
    if self.major_minor_version_string[0] == '3':
        # Pre-3.8 CPython 3 ABI: library name carries an 'm' suffix.
        python_lib_name += 'm'
    shprint(sh.cp, join(python_build_dir, python_lib_name + '.so'),
            join(self.ctx.bootstrap.dist_dir, 'libs', arch.arch))

    info('Renaming .so files to reflect cross-compile')
    self.reduce_object_file_names(join(dirn, 'site-packages'))

    return join(dirn, 'site-packages')
def prebuild_arch(self, arch):
    """Copy the recipe's bundled ``Setup`` file into the per-arch build
    directory, unless the patch step already ran for this arch."""
    if self.is_patched(arch):
        # Already prepared on a previous run; nothing to do.
        return
    source = join(self.get_recipe_dir(), 'Setup')
    destination = join(self.get_build_dir(arch.arch), 'Setup')
    shprint(sh.cp, source, destination)
def test_init_package():
    """Exercise ``enaml-native init-package`` into a clean tmp dir."""
    target = 'tmp/enaml-native-test'
    # Start from a clean slate if a previous run left output behind.
    if os.path.exists(target):
        sh.rm('-R', 'tmp/enaml-native-test')
    enaml_native = sh.Command('enaml-native')
    shprint(enaml_native, 'init-package', 'enaml-native-test', 'tmp/',
            _debug=True)
def apk(self, args):
    '''Create an APK using the given distribution.

    Older ant-only variant: normalizes path-valued passthrough
    arguments, exports release signing config to the environment, runs
    the dist's build.py parser and ant, then copies the produced APK
    into the current directory (globbing ``bin/`` as a fallback when
    the APK path cannot be parsed from ant's output).
    '''
    ctx = self.ctx
    dist = self._dist

    # Manually fixing these arguments at the string stage is
    # unsatisfactory and should probably be changed somehow, but
    # we can't leave it until later as the build.py scripts assume
    # they are in the current directory.
    fix_args = ('--dir', '--private', '--add-jar', '--add-source',
                '--whitelist', '--blacklist', '--presplash', '--icon')
    unknown_args = args.unknown_args
    # [:-1] so the "--flag path" branch can always index i + 1 safely.
    for i, arg in enumerate(unknown_args[:-1]):
        argx = arg.split('=')
        if argx[0] in fix_args:
            if len(argx) > 1:
                # "--flag=path" form: rewrite the path in place.
                unknown_args[i] = '='.join(
                    (argx[0], realpath(expanduser(argx[1]))))
            else:
                # "--flag path" form: rewrite the following argument.
                unknown_args[i + 1] = realpath(
                    expanduser(unknown_args[i + 1]))

    # Release signing config is passed to the build via env vars.
    env = os.environ.copy()
    if args.build_mode == 'release':
        if args.keystore:
            env['P4A_RELEASE_KEYSTORE'] = realpath(
                expanduser(args.keystore))
        if args.signkey:
            env['P4A_RELEASE_KEYALIAS'] = args.signkey
        if args.keystorepw:
            env['P4A_RELEASE_KEYSTORE_PASSWD'] = args.keystorepw
        if args.signkeypw:
            env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.signkeypw
        elif args.keystorepw and 'P4A_RELEASE_KEYALIAS_PASSWD' not in env:
            # Fall back to the keystore password for the key alias.
            env['P4A_RELEASE_KEYALIAS_PASSWD'] = args.keystorepw

    # Load the dist's build.py as a module to reuse its arg parser.
    build = imp.load_source('build', join(dist.dist_dir, 'build.py'))
    with current_directory(dist.dist_dir):
        build_args = build.parse_args(args.unknown_args)
        output = shprint(sh.ant, args.build_mode, _tail=20,
                         _critical=True, _env=env)

    info_main('# Copying APK to current directory')

    # Prefer the APK path printed by ant itself.
    apk_re = re.compile(r'.*Package: (.*\.apk)$')
    apk_file = None
    for line in reversed(output.splitlines()):
        m = apk_re.match(line)
        if m:
            apk_file = m.groups()[0]
            break

    if not apk_file:
        # Fall back to globbing bin/ for a matching build-mode APK.
        info_main(
            '# APK filename not found in build output, trying to guess')
        apks = glob.glob(
            join(dist.dist_dir, 'bin',
                 '*-*-{}.apk'.format(args.build_mode)))
        if len(apks) == 0:
            raise ValueError('Couldn\'t find the built APK')
        if len(apks) > 1:
            info('More than one built APK found...guessing you '
                 'just built {}'.format(apks[-1]))
        apk_file = apks[-1]

    info_main('# Found APK file: {}'.format(apk_file))
    shprint(sh.cp, apk_file, './')
def download_file(self, url, target, cwd=None):
    """
    (internal) Download an ``url`` to a ``target``.

    Simpler single-attempt variant: HTTP(S) URLs go through
    ``urlretrieve`` with a progress indicator; ``git``-scheme URLs are
    cloned or fetched/pulled in place. Returns the target path, or
    None if ``url`` is falsy.
    """
    if not url:
        return
    info('Downloading {} from {}'.format(self.name, url))

    if cwd:
        target = join(cwd, target)

    parsed_url = urlparse(url)
    if parsed_url.scheme in ('http', 'https'):
        def report_hook(index, blksize, size):
            # urlretrieve progress callback: size <= 0 means no
            # Content-Length, so only a byte count is shown.
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                    index * blksize * 100. / float(size))
            stdout.write('- Download {}\r'.format(progression))
            stdout.flush()

        if exists(target):
            unlink(target)

        urlretrieve(url, target, report_hook)
        return target
    elif parsed_url.scheme in ('git', 'git+ssh', 'git+http', 'git+https'):
        if isdir(target):
            # Existing clone: update it in place.
            with current_directory(target):
                shprint(sh.git, 'fetch', '--tags')
                if self.version:
                    shprint(sh.git, 'checkout', self.version)
                shprint(sh.git, 'pull')
                shprint(sh.git, 'pull', '--recurse-submodules')
                shprint(sh.git, 'submodule', 'update', '--recursive')
        else:
            # Strip the 'git+' transport prefix before cloning.
            if url.startswith('git+'):
                url = url[4:]
            shprint(sh.git, 'clone', '--recursive', url, target)
            if self.version:
                with current_directory(target):
                    shprint(sh.git, 'checkout', self.version)
                    shprint(sh.git, 'submodule', 'update', '--recursive')
        return target
def download(self):
    """Download this recipe's source into the shared packages cache.

    Uses a ``.mark-<filename>`` marker file to record a completed
    download; a cached file without its marker is treated as a partial
    download and removed. When an md5sum is declared for the recipe,
    the cached/downloaded file is verified against it (exiting on a
    post-download mismatch).
    """
    if self.url is None:
        info('Skipping {} download as no URL is set'.format(self.name))
        return

    url = self.versioned_url

    shprint(sh.mkdir, '-p', join(self.ctx.packages_path, self.name))

    with current_directory(join(self.ctx.packages_path, self.name)):
        # stdout[:-1] strips basename's trailing newline.
        filename = shprint(sh.basename, url).stdout[:-1].decode('utf-8')

        do_download = True

        marker_filename = '.mark-{}'.format(filename)
        if exists(filename) and isfile(filename):
            if not exists(marker_filename):
                # No marker => previous download was interrupted;
                # discard the partial file and re-download.
                shprint(sh.rm, filename)
            elif self.md5sum:
                # md5sum output is "<hash>  <file>"; take the hash.
                current_md5 = shprint(sh.md5sum, filename).split()[0]
                if current_md5 == self.md5sum:
                    debug('Downloaded expected content!')
                    do_download = False
                else:
                    info('Downloaded unexpected content...')
                    debug('* Generated md5sum: {}'.format(current_md5))
                    debug('* Expected md5sum: {}'.format(self.md5sum))
            else:
                # Cached and marked complete, no checksum to verify.
                do_download = False
                info('{} download already cached, skipping'.format(
                    self.name))

        # Should check headers here!
        warning('Should check headers here! Skipping for now.')

        # If we got this far, we will download
        if do_download:
            debug('Downloading {} from {}'.format(self.name, url))

            shprint(sh.rm, '-f', marker_filename)
            self.download_file(url, filename)
            # Marker written only after a successful download.
            shprint(sh.touch, marker_filename)

            if exists(filename) and isfile(filename) and self.md5sum:
                current_md5 = shprint(sh.md5sum, filename).split()[0]
                if self.md5sum is not None:
                    if current_md5 == self.md5sum:
                        debug('Downloaded expected content!')
                    else:
                        info('Downloaded unexpected content...')
                        debug('* Generated md5sum: {}'.format(
                            current_md5))
                        debug('* Expected md5sum: {}'.format(
                            self.md5sum))
                        exit(1)
def unpack(self, arch):
    """Unpack this recipe's downloaded source into its per-arch build dir.

    Honours a ``P4A_<name>_DIR`` override (copied in place of the
    archive), skips recipes without a URL, and otherwise extracts the
    cached zip/tar download, renaming the archive's root directory to
    the expected build dir name when they differ.
    """
    info_main('Unpacking {} for {}'.format(self.name, arch))

    build_dir = self.get_build_container_dir(arch)

    user_dir = environ.get('P4A_{}_DIR'.format(self.name.lower()))
    if user_dir is not None:
        info('P4A_{}_DIR exists, symlinking instead'.format(
            self.name.lower()))
        # AND: Currently there's something wrong if I use ln, fix this
        warning('Using cp -a instead of symlink...fix this!')
        if exists(self.get_build_dir(arch)):
            return
        shprint(sh.rm, '-rf', build_dir)
        shprint(sh.mkdir, '-p', build_dir)
        shprint(sh.rmdir, build_dir)
        ensure_dir(build_dir)
        shprint(sh.cp, '-a', user_dir, self.get_build_dir(arch))
        return

    if self.url is None:
        info('Skipping {} unpack as no URL is set'.format(self.name))
        return

    # stdout[:-1] strips basename's trailing newline.
    filename = shprint(
        sh.basename, self.versioned_url).stdout[:-1].decode('utf-8')

    with current_directory(build_dir):
        directory_name = self.get_build_dir(arch)

        # AND: Could use tito's get_archive_rootdir here
        if not exists(directory_name) or not isdir(directory_name):
            extraction_filename = join(self.ctx.packages_path,
                                       self.name, filename)
            if isfile(extraction_filename):
                if extraction_filename.endswith('.zip'):
                    try:
                        sh.unzip(extraction_filename)
                    except (sh.ErrorReturnCode_1, sh.ErrorReturnCode_2):
                        # Fix: exit code 1 means unzip finished with
                        # warnings (seen with some github zips); treat
                        # it as success like the other unpack variant.
                        pass
                    import zipfile
                    fileh = zipfile.ZipFile(extraction_filename, 'r')
                    root_directory = fileh.filelist[0].filename.split(
                        '/')[0]
                    # Fix: compare against the *basename*; comparing a
                    # bare archive-root name to the absolute build-dir
                    # path was always unequal, so mv was attempted even
                    # when the names already matched (mv onto itself).
                    if root_directory != basename(directory_name):
                        shprint(sh.mv, root_directory, directory_name)
                elif (extraction_filename.endswith('.tar.gz') or
                      extraction_filename.endswith('.tgz') or
                      extraction_filename.endswith('.tar.bz2') or
                      extraction_filename.endswith('.tbz2') or
                      extraction_filename.endswith('.tar.xz') or
                      extraction_filename.endswith('.txz')):
                    sh.tar('xf', extraction_filename)
                    # First path component of the listing == root dir.
                    root_directory = shprint(
                        sh.tar, 'tf', extraction_filename).stdout.decode(
                            'utf-8').split('\n')[0].split('/')[0]
                    if root_directory != basename(directory_name):
                        shprint(sh.mv, root_directory, directory_name)
                else:
                    # Fix: the message had a {} placeholder but no
                    # .format() call, so it printed a literal "{}".
                    raise Exception(
                        'Could not extract {} download, it must be .zip, '
                        '.tar.gz or .tar.bz2 or .tar.xz'.format(
                            extraction_filename))
            elif isdir(extraction_filename):
                # A directory "download": copy contents (minus .git).
                mkdir(directory_name)
                for entry in listdir(extraction_filename):
                    if entry not in ('.git', ):
                        shprint(sh.cp, '-Rv',
                                join(extraction_filename, entry),
                                directory_name)
            else:
                raise Exception(
                    'Given path is neither a file nor a directory: {}'.
                    format(extraction_filename))
        else:
            info('{} is already unpacked, skipping'.format(self.name))
def build_arch(self, arch):
    """Build libjpeg, then merge libsimd into the resulting archive.

    After the parent build, the produced ``libjpeg.a`` is renamed to
    ``libjpeg-orig.a`` and recombined with ``libsimd.a`` under the
    original name via ``ar -rcT`` (GNU ar's thin-archive mode).
    """
    super(JpegRecipe, self).build_arch(arch)

    lib_dir = self.get_lib_dir(arch)
    with current_directory(lib_dir):
        # Keep the original archive under a new name, then rebuild
        # libjpeg.a so it also pulls in the SIMD objects.
        shprint(sh.mv, 'libjpeg.a', 'libjpeg-orig.a')
        shprint(sh.ar, '-rcT', 'libjpeg.a',
                'libjpeg-orig.a', 'libsimd.a')
def run_pymodules_install(ctx, modules, project_dir=None,
                          ignore_setup_py=False):
    """ This function will take care of all non-recipe things, by:

        1. Processing them from --requirements (the modules argument)
           and installing them

        2. Installing the user project/app itself via setup.py if
           ignore_setup_py=True
    """

    info('*** PYTHON PACKAGE / PROJECT INSTALL STAGE ***')
    # Drop anything already provided by the build context.
    modules = list(filter(ctx.not_has_package, modules))

    # Bail out if no python deps and no setup.py to process:
    if not modules and (
            ignore_setup_py or
            project_dir is None or
            not project_has_setup_py(project_dir)):
        info('No Python modules and no setup.py to process, skipping')
        return

    # Output messages about what we're going to do:
    if modules:
        info('The requirements ({}) don\'t have recipes, attempting to '
             'install them with pip'.format(', '.join(modules)))
        info('If this fails, it may mean that the module has compiled '
             'components and needs a recipe.')
    if project_dir is not None and \
            project_has_setup_py(project_dir) and not ignore_setup_py:
        info('Will process project install, if it fails then the '
             'project may not be compatible for Android install.')

    # Use a venv to do our pip commands, so the exact pip version we
    # need is available (created inside the build dir).
    venv = sh.Command(ctx.virtualenv)
    with current_directory(join(ctx.build_dir)):
        shprint(venv,
                '--python=python{}'.format(
                    ctx.python_recipe.major_minor_version_string.
                    partition(".")[0]),
                'venv')

        # Prepare base environment and upgrade pip:
        base_env = copy.copy(os.environ)
        base_env["PYTHONPATH"] = ctx.get_site_packages_dir()
        info('Upgrade pip to latest version')
        shprint(sh.bash, '-c', (
            "source venv/bin/activate && pip install -U pip"),
            _env=copy.copy(base_env))

        # Install Cython in case modules need it to build:
        info('Install Cython in case one of the modules needs it to build')
        shprint(sh.bash, '-c', ("venv/bin/pip install Cython"),
                _env=copy.copy(base_env))

        # Get environment variables for build (with CC/compiler set):
        standard_recipe = CythonRecipe()
        standard_recipe.ctx = ctx
        # (note: following line enables explicit -lpython... linker options)
        standard_recipe.call_hostpython_via_targetpython = False
        recipe_env = standard_recipe.get_recipe_env(ctx.archs[0])
        env = copy.copy(base_env)
        env.update(recipe_env)

        # Make sure our build package dir is available, and the virtualenv
        # site packages come FIRST (so the proper pip version is used):
        env["PYTHONPATH"] += ":" + ctx.get_site_packages_dir()
        env["PYTHONPATH"] = os.path.abspath(join(
            ctx.build_dir, "venv", "lib",
            "python" + ctx.python_recipe.major_minor_version_string,
            "site-packages")) + ":" + env["PYTHONPATH"]

        # Install the manually specified requirements first:
        if not modules:
            info('There are no Python modules to install, skipping')
        else:
            info('Creating a requirements.txt file for the Python modules')
            with open('requirements.txt', 'w') as fileh:
                for module in modules:
                    # Allow pinning a module via a VERSION_<module>
                    # environment variable.
                    key = 'VERSION_' + module
                    if key in environ:
                        line = '{}=={}\n'.format(module, environ[key])
                    else:
                        line = '{}\n'.format(module)
                    fileh.write(line)

            info('Installing Python modules with pip')
            info('IF THIS FAILS, THE MODULES MAY NEED A RECIPE. '
                 'A reason for this is often modules compiling '
                 'native code that is unaware of Android cross-compilation '
                 'and does not work without additional '
                 'changes / workarounds.')

            # The replace() quotes the target path for safe embedding
            # in the single-quoted bash string.
            shprint(sh.bash, '-c', (
                "venv/bin/pip " +
                "install -v --target '{0}' --no-deps -r requirements.txt"
            ).format(ctx.get_site_packages_dir().replace(
                "'", "'\"'\"'")), _env=copy.copy(env))

        # Afterwards, run setup.py if present:
        if project_dir is not None and (
                project_has_setup_py(project_dir) and not ignore_setup_py):
            with current_directory(project_dir):
                info('got setup.py or similar, running project install. ' +
                     '(disable this behavior with --ignore-setup-py)')

                # Compute & output the constraints we will use:
                info('Contents that will be used for constraints.txt:')
                constraints = subprocess.check_output([
                    join(ctx.build_dir, "venv", "bin", "pip"),
                    "freeze"
                ], env=copy.copy(env))
                try:
                    constraints = constraints.decode("utf-8", "replace")
                except AttributeError:
                    pass
                info(constraints)

                # Make sure all packages found are fixed in version
                # by writing a constraint file, to avoid recipes being
                # upgraded & reinstalled:
                with open('constraints.txt', 'wb') as fileh:
                    fileh.write(constraints.encode("utf-8", "replace"))

                info('Populating venv\'s site-packages with '
                     'ctx.get_site_packages_dir()...')

                # Copy dist contents into site-packages for discovery.
                # Why this is needed:
                # --target is somewhat evil and messes with discovery of
                # packages in PYTHONPATH if that also includes the target
                # folder. So we need to use the regular virtualenv
                # site-packages folder instead.
                # Reference:
                # https://github.com/pypa/pip/issues/6223
                ctx_site_packages_dir = os.path.normpath(
                    os.path.abspath(ctx.get_site_packages_dir()))
                venv_site_packages_dir = os.path.normpath(os.path.join(
                    ctx.build_dir, "venv", "lib", [
                        f for f in os.listdir(os.path.join(
                            ctx.build_dir, "venv", "lib"
                        )) if f.startswith("python")
                    ][0], "site-packages"))
                copied_over_contents = []
                for f in os.listdir(ctx_site_packages_dir):
                    full_path = os.path.join(ctx_site_packages_dir, f)
                    if not os.path.exists(os.path.join(
                            venv_site_packages_dir, f)):
                        if os.path.isdir(full_path):
                            shutil.copytree(full_path, os.path.join(
                                venv_site_packages_dir, f))
                        else:
                            shutil.copy2(full_path, os.path.join(
                                venv_site_packages_dir, f))
                        copied_over_contents.append(f)

                # Get listing of virtualenv's site-packages, to see the
                # newly added things afterwards & copy them back into
                # the distribution folder / build context site-packages:
                previous_venv_contents = os.listdir(
                    venv_site_packages_dir)

                # Actually run setup.py:
                info('Launching package install...')
                shprint(sh.bash, '-c', (
                    "'" + join(
                        ctx.build_dir, "venv", "bin", "pip"
                    ).replace("'", "'\"'\"'") + "' " +
                    "install -c constraints.txt -v ."
                ).format(ctx.get_site_packages_dir().replace(
                    "'", "'\"'\"'")), _env=copy.copy(env))

                # Go over all new additions and copy them back:
                info('Copying additions resulting from setup.py back ' +
                     'into ctx.get_site_packages_dir()...')
                new_venv_additions = []
                for f in (set(os.listdir(venv_site_packages_dir)) -
                          set(previous_venv_contents)):
                    new_venv_additions.append(f)
                    full_path = os.path.join(venv_site_packages_dir, f)
                    if os.path.isdir(full_path):
                        shutil.copytree(full_path, os.path.join(
                            ctx_site_packages_dir, f))
                    else:
                        shutil.copy2(full_path, os.path.join(
                            ctx_site_packages_dir, f))

                # Undo all the changes we did to the venv-site packages:
                info('Reverting additions to '
                     'virtualenv\'s site-packages...')
                for f in set(copied_over_contents + new_venv_additions):
                    full_path = os.path.join(venv_site_packages_dir, f)
                    if os.path.isdir(full_path):
                        shutil.rmtree(full_path)
                    else:
                        os.remove(full_path)
        elif not ignore_setup_py:
            info("No setup.py found in project directory: " +
                 str(project_dir))

        # Strip object files after potential Cython or native code builds:
        standard_recipe.strip_object_files(ctx.archs[0], env,
                                           build_dir=ctx.build_dir)
def run_setuppy_install(ctx, project_dir, env=None):
    """Run ``pip install .`` for the user's project inside the build venv.

    Freezes the venv's current package versions into a temporary
    constraints file first, so the project install cannot upgrade or
    reinstall packages that recipes already built. The venv's
    site-packages is temporarily populated from
    ``ctx.get_site_packages_dir()`` so pip can discover already-installed
    dist packages, and any additions made by the install are copied back
    out; all temporary changes are reverted afterwards.

    :param ctx: build context providing ``build_dir`` and
        ``get_site_packages_dir()``.
    :param project_dir: directory containing the project's setup.py.
    :param env: environment mapping for subprocesses (empty dict if None).
    """
    if env is None:
        env = dict()
    with current_directory(project_dir):
        info('got setup.py or similar, running project install. ' +
             '(disable this behavior with --ignore-setup-py)')

        # Compute & output the constraints we will use:
        info('Contents that will be used for constraints.txt:')
        constraints = subprocess.check_output(
            [join(ctx.build_dir, "venv", "bin", "pip"), "freeze"],
            env=copy.copy(env))
        try:
            constraints = constraints.decode("utf-8", "replace")
        except AttributeError:
            # Already a str (depends on subprocess/py version behavior).
            pass
        info(constraints)

        # Make sure all packages found are fixed in version
        # by writing a constraint file, to avoid recipes being
        # upgraded & reinstalled:
        with open('._tmp_p4a_recipe_constraints.txt', 'wb') as fileh:
            fileh.write(constraints.encode("utf-8", "replace"))
        try:
            info('Populating venv\'s site-packages with '
                 'ctx.get_site_packages_dir()...')

            # Copy dist contents into site-packages for discovery.
            # Why this is needed:
            # --target is somewhat evil and messes with discovery of
            # packages in PYTHONPATH if that also includes the target
            # folder. So we need to use the regular virtualenv
            # site-packages folder instead.
            # Reference:
            # https://github.com/pypa/pip/issues/6223
            ctx_site_packages_dir = os.path.normpath(
                os.path.abspath(ctx.get_site_packages_dir()))
            venv_site_packages_dir = os.path.normpath(os.path.join(
                ctx.build_dir, "venv", "lib", [
                    f for f in os.listdir(os.path.join(
                        ctx.build_dir, "venv", "lib"))
                    if f.startswith("python")
                ][0], "site-packages"))
            copied_over_contents = []
            for f in os.listdir(ctx_site_packages_dir):
                full_path = os.path.join(ctx_site_packages_dir, f)
                if not os.path.exists(
                        os.path.join(venv_site_packages_dir, f)):
                    if os.path.isdir(full_path):
                        shutil.copytree(full_path, os.path.join(
                            venv_site_packages_dir, f))
                    else:
                        shutil.copy2(full_path, os.path.join(
                            venv_site_packages_dir, f))
                    copied_over_contents.append(f)

            # Get listing of virtualenv's site-packages, to see the
            # newly added things afterwards & copy them back into
            # the distribution folder / build context site-packages:
            previous_venv_contents = os.listdir(venv_site_packages_dir)

            # Actually run setup.py:
            info('Launching package install...')
            # NOTE(fix): the original appended a spurious
            # ``.format(ctx.get_site_packages_dir()...)`` to this command
            # string even though it contains no ``{}`` placeholder — a
            # no-op that would raise if the pip path contained braces.
            shprint(
                sh.bash, '-c',
                "'" + join(ctx.build_dir, "venv", "bin", "pip").replace(
                    "'", "'\"'\"'") + "' " +
                "install -c ._tmp_p4a_recipe_constraints.txt -v .",
                _env=copy.copy(env))

            # Go over all new additions and copy them back:
            info('Copying additions resulting from setup.py back '
                 'into ctx.get_site_packages_dir()...')
            new_venv_additions = []
            for f in (set(os.listdir(venv_site_packages_dir)) -
                      set(previous_venv_contents)):
                new_venv_additions.append(f)
                full_path = os.path.join(venv_site_packages_dir, f)
                if os.path.isdir(full_path):
                    shutil.copytree(full_path, os.path.join(
                        ctx_site_packages_dir, f))
                else:
                    shutil.copy2(full_path, os.path.join(
                        ctx_site_packages_dir, f))

            # Undo all the changes we did to the venv-site packages:
            info('Reverting additions to '
                 'virtualenv\'s site-packages...')
            for f in set(copied_over_contents + new_venv_additions):
                full_path = os.path.join(venv_site_packages_dir, f)
                if os.path.isdir(full_path):
                    shutil.rmtree(full_path)
                else:
                    os.remove(full_path)
        finally:
            # Always clean up the temporary constraints file.
            os.remove("._tmp_p4a_recipe_constraints.txt")
def build_cython_components(self, arch):
    """Build the recipe's extension modules, cythonizing where needed.

    First attempts ``setup.py build_ext``; if that fails (expected when
    .pyx sources haven't been translated yet) it runs
    :meth:`cythonize_build` and retries. Finally strips ``.o`` files in
    the build output using the cross ``STRIP`` tool from the env.

    :param arch: the target architecture object (provides ``arch.arch``).
    """
    info('Cythonizing anything necessary in {}'.format(self.name))
    env = self.get_recipe_env(arch)
    if self.ctx.python_recipe.from_crystax:
        # Ask the host interpreter where its site-packages live so they
        # can be appended to PYTHONPATH for the build.
        command = sh.Command('python{}'.format(
            self.ctx.python_recipe.version))
        site_packages_dirs = command(
            '-c', 'import site; print("\\n".join(site.getsitepackages()))')
        site_packages_dirs = site_packages_dirs.stdout.decode(
            'utf-8').split('\n')
        if 'PYTHONPATH' in env:
            # Bug fix: the original concatenated the env *dict* itself
            # (``env + ':...'``), raising TypeError; extend the existing
            # PYTHONPATH string instead.
            env['PYTHONPATH'] = env['PYTHONPATH'] + ':{}'.format(
                ':'.join(site_packages_dirs))
        else:
            env['PYTHONPATH'] = ':'.join(site_packages_dirs)
    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, '-c', 'import sys; print(sys.path)', _env=env)
        print('cwd is', realpath(curdir))
        info('Trying first build of {} to get cython files: this is '
             'expected to fail'.format(self.name))
        manually_cythonise = False
        try:
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    *self.setup_extra_args)
        except sh.ErrorReturnCode_1:
            print()
            info('{} first build failed (as expected)'.format(self.name))
            manually_cythonise = True
        if manually_cythonise:
            self.cythonize_build(env=env)
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env,
                    _tail=20, _critical=True, *self.setup_extra_args)
        else:
            # Bug fix: adjacent string literals previously produced
            # "skipping manualcythonising." (missing space).
            info('First build appeared to complete correctly, skipping '
                 'manual cythonising.')
        print('stripping')
        build_lib = glob.glob('./build/lib*')
        shprint(sh.find, build_lib[0], '-name', '*.o', '-exec',
                env['STRIP'], '{}', ';', _env=env)
        print('stripped!?')
def download_file(self, url, target, cwd=None):
    """
    (internal) Download an ``url`` to a ``target``.

    HTTP(S) URLs are fetched with ``urlretrieve`` with up to 5 retry
    attempts and exponential backoff; ``git``-scheme URLs are cloned or
    updated in place. Returns the target path, or None when ``url`` is
    falsy.

    :param url: source URL; http(s) or git(+file/ssh/http/https) scheme.
    :param target: destination path, joined onto ``cwd`` if given.
    :param cwd: optional directory to resolve ``target`` against.
    """
    if not url:
        return
    info('Downloading {} from {}'.format(self.name, url))
    if cwd:
        target = join(cwd, target)
    parsed_url = urlparse(url)
    if parsed_url.scheme in ('http', 'https'):
        def report_hook(index, blksize, size):
            # Progress callback for urlretrieve; suppressed on CI to
            # avoid log spam.
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                    index * blksize * 100. / float(size))
            if "CI" not in environ:
                stdout.write('- Download {}\r'.format(progression))
                stdout.flush()

        # Remove any stale partial download before starting fresh.
        if exists(target):
            unlink(target)
        # Download item with multiple attempts (for bad connections):
        attempts = 0
        seconds = 1
        while True:
            try:
                # jqueryui.com returns a 403 w/ the default user agent
                # Mozilla/5.0 doesnt handle redirection for liblzma
                url_opener.addheaders = [('User-agent', 'Wget/1.0')]
                urlretrieve(url, target, report_hook)
            except OSError as e:
                attempts += 1
                if attempts >= 5:
                    raise
                stdout.write(
                    'Download failed: {}; retrying in {} second(s)...'.
                    format(e, seconds))
                time.sleep(seconds)
                seconds *= 2
                # Retry; the finally below still restores the headers
                # before the next loop iteration.
                continue
            finally:
                # Restore the module-level opener headers whether the
                # attempt succeeded, failed, or raised.
                url_opener.addheaders = url_orig_headers
            break
        return target
    elif parsed_url.scheme in ('git', 'git+file', 'git+ssh', 'git+http',
                               'git+https'):
        if isdir(target):
            # Existing clone: fetch, pin to self.version if set, and
            # only pull when on a named branch (detached HEAD gives an
            # empty `branch --show-current`).
            with current_directory(target):
                shprint(sh.git, 'fetch', '--tags', '--recurse-submodules')
                if self.version:
                    shprint(sh.git, 'checkout', self.version)
                branch = sh.git('branch', '--show-current')
                if branch:
                    shprint(sh.git, 'pull')
                    shprint(sh.git, 'pull', '--recurse-submodules')
                shprint(sh.git, 'submodule', 'update', '--recursive')
        else:
            # Fresh clone; strip the 'git+' transport prefix first.
            if url.startswith('git+'):
                url = url[4:]
            shprint(sh.git, 'clone', '--recursive', url, target)
            if self.version:
                with current_directory(target):
                    shprint(sh.git, 'checkout', self.version)
                    shprint(sh.git, 'submodule', 'update', '--recursive')
        return target
def prebuild_arch(self, arch):
    """Make the build and target directories"""
    build_path = self.get_build_dir(arch.arch)
    if exists(build_path):
        return
    info("creating {}".format(build_path))
    shprint(sh.mkdir, '-p', build_path)
def build_arch(self, arch, *extra_args):
    """Run ndk-build (verbose, for this ABI) inside the build directory."""
    super(NDKRecipe, self).build_arch(arch)
    build_env = self.get_recipe_env(arch)
    build_dir = self.get_build_dir(arch.arch)
    with current_directory(build_dir):
        shprint(
            sh.ndk_build,
            'V=1',
            'APP_ABI=' + arch.arch,
            *extra_args,
            _env=build_env)
def build_arch(self, arch):
    """simple shared compile

    Compiles ifaddrs.c directly with the cross CC (no setup.py),
    links it into libifaddrs.so, and distributes both the library
    and its header into the libs dir, the Python install dir, and
    the python recipe's build tree.
    """
    env = self.get_recipe_env(arch, with_flags_in_cc=False)
    # Make sure the build dir and the python recipe's Lib/Include
    # directories exist before copying into them below.
    for path in (
            self.get_build_dir(arch.arch),
            join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Lib'),
            join(self.ctx.python_recipe.get_build_dir(arch.arch),
                 'Include'),
    ):
        if not exists(path):
            info("creating {}".format(path))
            shprint(sh.mkdir, '-p', path)
    # CC may contain flags after the compiler path; only the first
    # token is the executable.
    cli = env['CC'].split()
    cc = sh.Command(cli[0])
    with current_directory(self.get_build_dir(arch.arch)):
        # Compile step (produces ifaddrs.o).
        cflags = env['CFLAGS'].split()
        cflags.extend(['-I.', '-c', '-l.', 'ifaddrs.c', '-I.'])
        shprint(cc, *cflags, _env=env)
        # Link step: shared object from the compiled ifaddrs.o.
        cflags = env['CFLAGS'].split()
        cflags.extend(
            ['-shared', '-I.', 'ifaddrs.o', '-o', 'libifaddrs.so'])
        cflags.extend(env['LDFLAGS'].split())
        shprint(cc, *cflags, _env=env)
        # Distribute the shared library.
        shprint(sh.cp, 'libifaddrs.so', self.ctx.get_libs_dir(arch.arch))
        shprint(sh.cp, "libifaddrs.so",
                join(self.ctx.get_python_install_dir(), 'lib'))
        # drop header in to the Python include directory
        # NOTE(review): version[0:3] assumes an 'X.Y' version string —
        # would mis-slice 'X.YY'; confirm against python_recipe.version.
        shprint(sh.cp, "ifaddrs.h",
                join(self.ctx.get_python_install_dir(),
                     'include/python{}'.format(
                         self.ctx.python_recipe.version[0:3])))
        include_path = join(
            self.ctx.python_recipe.get_build_dir(arch.arch), 'Include')
        shprint(sh.cp, "ifaddrs.h", include_path)
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
    """Build the cross-compilation environment for the guest python.

    Delegates to the parent implementation for CrystaX-based python;
    otherwise constructs CC/AR/LD/RANLIB/READELF/STRIP pointing at the
    NDK clang + gcc toolchain for ``arch``, prepends the hostpython dir
    to PATH, and sets CFLAGS/CPPFLAGS/LDFLAGS/SYSROOT for the NDK
    unified-headers layout.

    :param arch: target architecture object (command_prefix, target).
    :param with_flags_in_cc: forwarded to the parent for CrystaX only.
    :return: a modified copy of ``os.environ``.
    """
    if self.from_crystax:
        return super(GuestPythonRecipe, self).get_recipe_env(
            arch=arch, with_flags_in_cc=with_flags_in_cc)
    env = environ.copy()

    android_host = env['HOSTARCH'] = arch.command_prefix
    toolchain = '{toolchain_prefix}-{toolchain_version}'.format(
        toolchain_prefix=self.ctx.toolchain_prefix,
        toolchain_version=self.ctx.toolchain_version)
    toolchain = join(self.ctx.ndk_dir, 'toolchains',
                     toolchain, 'prebuilt', 'linux-x86_64')
    # clang driver with explicit target triple, backed by the GCC
    # toolchain for binutils.
    env['CC'] = (
        '{clang} -target {target} -gcc-toolchain {toolchain}').format(
            clang=join(self.ctx.ndk_dir, 'toolchains', 'llvm', 'prebuilt',
                       'linux-x86_64', 'bin', 'clang'),
            target=arch.target,
            toolchain=toolchain)
    env['AR'] = join(toolchain, 'bin', android_host) + '-ar'
    env['LD'] = join(toolchain, 'bin', android_host) + '-ld'
    env['RANLIB'] = join(toolchain, 'bin', android_host) + '-ranlib'
    env['READELF'] = join(toolchain, 'bin', android_host) + '-readelf'
    env['STRIP'] = join(toolchain, 'bin', android_host) + '-strip'
    env['STRIP'] += ' --strip-debug --strip-unneeded'

    # Make the matching hostpython the first 'python' on PATH.
    env['PATH'] = ('{hostpython_dir}:{old_path}').format(
        hostpython_dir=self.get_recipe(
            'host' + self.name, self.ctx).get_path_to_python(),
        old_path=env['PATH'])

    # NDK unified headers: sysroot + per-arch isystem include dir.
    ndk_flags = (
        '-fPIC --sysroot={ndk_sysroot} -D__ANDROID_API__={android_api} '
        '-isystem {ndk_android_host} -I{ndk_include}').format(
            ndk_sysroot=join(self.ctx.ndk_dir, 'sysroot'),
            android_api=self.ctx.ndk_api,
            ndk_android_host=join(
                self.ctx.ndk_dir, 'sysroot', 'usr', 'include',
                android_host),
            ndk_include=join(self.ctx.ndk_dir, 'sysroot', 'usr',
                             'include'))
    sysroot = self.ctx.ndk_platform
    env['CFLAGS'] = env.get('CFLAGS', '') + ' ' + ndk_flags
    env['CPPFLAGS'] = env.get('CPPFLAGS', '') + ' ' + ndk_flags
    env['LDFLAGS'] = env.get('LDFLAGS', '') + ' --sysroot={} -L{}'.format(
        sysroot, join(sysroot, 'usr', 'lib'))

    # Manually add the libs directory, and copy some object
    # files to the current directory otherwise they aren't
    # picked up. This seems necessary because the --sysroot
    # setting in LDFLAGS is overridden by the other flags.
    # TODO: Work out why this doesn't happen in the original
    # bpo-30386 Makefile system.
    logger.warning('Doing some hacky stuff to link properly')
    lib_dir = join(sysroot, 'usr', 'lib')
    if arch.arch == 'x86_64':
        lib_dir = join(sysroot, 'usr', 'lib64')
    env['LDFLAGS'] += ' -L{}'.format(lib_dir)
    shprint(sh.cp, join(lib_dir, 'crtbegin_so.o'), './')
    shprint(sh.cp, join(lib_dir, 'crtend_so.o'), './')

    env['SYSROOT'] = sysroot

    return env
def distribute_javaclasses(self, javaclass_dir, dest_dir="src"):
    '''Copy existing javaclasses from build dir to current dist dir.

    :param javaclass_dir: glob pattern matching the java files/dirs to
        copy.
    :param dest_dir: destination directory, created if missing
        (defaults to "src", matching the previous hardcoded behavior).
    '''
    info('Copying java files')
    # Make sure the destination exists — the old version assumed 'src'
    # was already present and copied files one at a time; this matches
    # the sibling implementation (ensure dir, single batched cp).
    ensure_dir(dest_dir)
    filenames = glob.glob(javaclass_dir)
    shprint(sh.cp, '-a', *filenames, dest_dir)
def install_python_package(self, arch, name=None, env=None, is_dir=True):
    '''Automate the installation of a Python package (or a cython
    package where the cython components are pre-built).

    Runs ``hostpython setup.py install`` in the recipe build dir, with
    install flags chosen per python flavour (CrystaX, hostpython-via-
    targetpython, or plain hostpython with a PYTHONPATH pointing at the
    hostpython site-packages). Optionally installs into the hostpython
    build dir too.

    :param arch: target architecture object.
    :param name: package name; defaults to ``self.name``.
    :param env: build environment; defaults to ``get_recipe_env(arch)``.
    :param is_dir: accepted for interface compatibility; not used here.
    '''
    if name is None:
        name = self.name
    if env is None:
        env = self.get_recipe_env(arch)

    info('Installing {} into site-packages'.format(self.name))

    with current_directory(self.get_build_dir(arch.arch)):
        hostpython = sh.Command(self.hostpython_location)

        if self.ctx.python_recipe.from_crystax:
            # CrystaX python: install straight into the python install
            # dir with a flat --install-lib.
            hpenv = env.copy()
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=.',
                    _env=hpenv, *self.setup_extra_args)
        elif self.call_hostpython_via_targetpython:
            # hostpython already resolves to the target python; a plain
            # install suffices.
            shprint(hostpython, 'setup.py', 'install', '-O2', _env=env,
                    *self.setup_extra_args)
        else:
            # Plain hostpython: expose its site-packages on PYTHONPATH
            # so setup.py can import build-time dependencies.
            hppath = join(dirname(self.hostpython_location), 'Lib',
                          'site-packages')
            hpenv = env.copy()
            if 'PYTHONPATH' in hpenv:
                hpenv['PYTHONPATH'] = ':'.join(
                    [hppath] + hpenv['PYTHONPATH'].split(':'))
            else:
                hpenv['PYTHONPATH'] = hppath
            # TODO: Hardcoded python2.7 needs fixing
            shprint(hostpython, 'setup.py', 'install', '-O2',
                    '--root={}'.format(self.ctx.get_python_install_dir()),
                    '--install-lib=lib/python2.7/site-packages',
                    _env=hpenv, *self.setup_extra_args)

        # If asked, also install in the hostpython build dir
        if self.install_in_hostpython:
            self.install_hostpython_package(arch)
def distribute_javaclasses(self, javaclass_dir, dest_dir="src"):
    '''Copy existing javaclasses from build dir to current dist dir.'''
    info('Copying java files')
    ensure_dir(dest_dir)
    matches = glob.glob(javaclass_dir)
    # One batched recursive copy of every match into the destination.
    shprint(sh.cp, '-a', *(matches + [dest_dir]))