Example #1
 def recipes(self, args):
     ctx = self.ctx
     if args.compact:
         print(" ".join(set(Recipe.list_recipes(ctx))))
     else:
         for name in sorted(Recipe.list_recipes(ctx)):
             try:
                 recipe = Recipe.get_recipe(name, ctx)
             except IOError:
                 warning('Recipe "{}" could not be loaded'.format(name))
                 # Skip this recipe: `recipe` would be undefined or stale below.
                 continue
             except SyntaxError:
                 import traceback
                 traceback.print_exc()
                 warning(('Recipe "{}" could not be loaded due to a '
                          'syntax error').format(name))
                 continue
             version = str(recipe.version)
             print('{Fore.BLUE}{Style.BRIGHT}{recipe.name:<12} '
                   '{Style.RESET_ALL}{Fore.LIGHTBLUE_EX}'
                   '{version:<8}{Style.RESET_ALL}'.format(
                     recipe=recipe, Fore=Out_Fore, Style=Out_Style,
                     version=version))
             print('    {Fore.GREEN}depends: {recipe.depends}'
                   '{Fore.RESET}'.format(recipe=recipe, Fore=Out_Fore))
             if recipe.conflicts:
                 print('    {Fore.RED}conflicts: {recipe.conflicts}'
                       '{Fore.RESET}'
                       .format(recipe=recipe, Fore=Out_Fore))
             if recipe.opt_depends:
                 print('    {Fore.YELLOW}optional depends: '
                       '{recipe.opt_depends}{Fore.RESET}'
                       .format(recipe=recipe, Fore=Out_Fore))
Example #2
    def set_libs_flags(self, env, arch):
        '''Takes care to properly link libraries with python depending on our
        requirements and the attribute :attr:`opt_depends`.
        '''
        if 'libffi' in self.ctx.recipe_build_order:
            info('Activating flags for libffi')
            recipe = Recipe.get_recipe('libffi', self.ctx)
            include = ' -I' + ' -I'.join(recipe.get_include_dirs(arch))
            ldflag = ' -L' + join(recipe.get_build_dir(arch.arch),
                                  recipe.get_host(arch), '.libs') + ' -lffi'
            env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include
            env['LDFLAGS'] = env.get('LDFLAGS', '') + ldflag

        if 'openssl' in self.ctx.recipe_build_order:
            recipe = Recipe.get_recipe('openssl', self.ctx)
            openssl_build_dir = recipe.get_build_dir(arch.arch)
            setuplocal = join('Modules', 'Setup.local')
            shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'), setuplocal)
            shprint(sh.sed, '-i.backup', 's#^SSL=.*#SSL={}#'.format(openssl_build_dir), setuplocal)
            env['OPENSSL_VERSION'] = recipe.version

        if 'sqlite3' in self.ctx.recipe_build_order:
            # Include sqlite3 in python2 build
            recipe = Recipe.get_recipe('sqlite3', self.ctx)
            include = ' -I' + recipe.get_build_dir(arch.arch)
            lib = ' -L' + recipe.get_lib_dir(arch) + ' -lsqlite3'
            # Insert or append to env
            flag = 'CPPFLAGS'
            env[flag] = env[flag] + include if flag in env else include
            flag = 'LDFLAGS'
            env[flag] = env[flag] + lib if flag in env else lib
            
        return env
Example #3
 def get_bootstrap_from_recipes(cls, recipes, ctx):
     '''Returns a bootstrap whose recipe requirements do not conflict with
     the given recipes.'''
     info('Trying to find a bootstrap that matches the given recipes.')
     bootstraps = [cls.get_bootstrap(name, ctx)
                   for name in cls.list_bootstraps()]
     acceptable_bootstraps = []
     for bs in bootstraps:
         if not bs.can_be_chosen_automatically:
             continue
         possible_dependency_lists = expand_dependencies(bs.recipe_depends)
         for possible_dependencies in possible_dependency_lists:
             ok = True
             for recipe in possible_dependencies:
                 recipe = Recipe.get_recipe(recipe, ctx)
                 if any([conflict in recipes for conflict in recipe.conflicts]):
                     ok = False
                     break
             for recipe in recipes:
                 recipe = Recipe.get_recipe(recipe, ctx)
                 if any([conflict in possible_dependencies
                         for conflict in recipe.conflicts]):
                     ok = False
                     break
             if ok:
                 acceptable_bootstraps.append(bs)
     info('Found {} acceptable bootstraps: {}'.format(
         len(acceptable_bootstraps),
         [bs.name for bs in acceptable_bootstraps]))
     if acceptable_bootstraps:
         info('Using the first of these: {}'
              .format(acceptable_bootstraps[0].name))
         return acceptable_bootstraps[0]
     return None
Example #4
    def recipes(self, args):
        parser = argparse.ArgumentParser(
                description="List all the available recipes")
        parser.add_argument(
                "--compact", action="store_true", default=False,
                help="Produce a compact list suitable for scripting")

        args = parser.parse_args(args)

        ctx = self.ctx
        if args.compact:
            print(" ".join(set(Recipe.list_recipes(ctx))))
        else:
            for name in sorted(Recipe.list_recipes(ctx)):
                recipe = Recipe.get_recipe(name, ctx)
                version = str(recipe.version)
                print('{Fore.BLUE}{Style.BRIGHT}{recipe.name:<12} '
                      '{Style.RESET_ALL}{Fore.LIGHTBLUE_EX}'
                      '{version:<8}{Style.RESET_ALL}'.format(
                        recipe=recipe, Fore=Out_Fore, Style=Out_Style,
                        version=version))
                print('    {Fore.GREEN}depends: {recipe.depends}'
                      '{Fore.RESET}'.format(recipe=recipe, Fore=Out_Fore))
                if recipe.conflicts:
                    print('    {Fore.RED}conflicts: {recipe.conflicts}'
                          '{Fore.RESET}'
                          .format(recipe=recipe, Fore=Out_Fore))
                if recipe.opt_depends:
                    print('    {Fore.YELLOW}optional depends: '
                          '{recipe.opt_depends}{Fore.RESET}'
                          .format(recipe=recipe, Fore=Out_Fore))
Example #5
    def set_libs_flags(self, env, arch):
        '''Takes care to properly link libraries with python depending on our
        requirements and the attribute :attr:`opt_depends`.
        '''
        def add_flags(include_flags, link_dirs, link_libs):
            env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include_flags
            env['LDFLAGS'] = env.get('LDFLAGS', '') + link_dirs
            env['LIBS'] = env.get('LIBS', '') + link_libs

        if 'sqlite3' in self.ctx.recipe_build_order:
            info('Activating flags for sqlite3')
            recipe = Recipe.get_recipe('sqlite3', self.ctx)
            add_flags(' -I' + recipe.get_build_dir(arch.arch),
                      ' -L' + recipe.get_lib_dir(arch), ' -lsqlite3')

        if 'libffi' in self.ctx.recipe_build_order:
            info('Activating flags for libffi')
            recipe = Recipe.get_recipe('libffi', self.ctx)
            # To force the correct linkage against our libffi library, we
            # point the following variable at the directory containing our
            # libffi.pc file, because the python build system uses pkg-config
            # to configure it.
            env['PKG_CONFIG_PATH'] = recipe.get_build_dir(arch.arch)
            add_flags(' -I' + ' -I'.join(recipe.get_include_dirs(arch)),
                      ' -L' + join(recipe.get_build_dir(arch.arch), '.libs'),
                      ' -lffi')

        if 'openssl' in self.ctx.recipe_build_order:
            info('Activating flags for openssl')
            recipe = Recipe.get_recipe('openssl', self.ctx)
            add_flags(recipe.include_flags(arch),
                      recipe.link_dirs_flags(arch), recipe.link_libs_flags())
        return env
Example #6
 def test_get_recipe(self):
     """
     Makes sure `get_recipe()` returns a `Recipe` object when possible.
     """
     ctx = Context()
     recipe_name = 'python3'
     recipe = Recipe.get_recipe(recipe_name, ctx)
     self.assertTrue(isinstance(recipe, Recipe))
     self.assertEqual(recipe.name, recipe_name)
     recipe_name = 'does_not_exist'
     with self.assertRaises(ValueError) as e:
         Recipe.get_recipe(recipe_name, ctx)
     self.assertEqual(
         e.exception.args[0], 'Recipe does not exist: {}'.format(recipe_name))
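Note: as this test shows, get_recipe() either returns a Recipe instance or raises ValueError for an unknown name; several of the older snippets in this listing catch IOError instead, which matches earlier releases. A minimal defensive-call sketch, assuming the same import paths the test suite above uses and a hypothetical fallback to pip handling:

from pythonforandroid.build import Context
from pythonforandroid.recipe import Recipe

ctx = Context()
try:
    recipe = Recipe.get_recipe('python3', ctx)
except ValueError:
    recipe = None  # no recipe of that name; such requirements go to pip instead
else:
    print(recipe.name, recipe.version)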
Example #7
    def has_package(self, name, arch=None):
        # If this is a file path, it'll need special handling:
        if (name.find("/") >= 0 or name.find("\\") >= 0) and \
                name.find("://") < 0:  # (:// would indicate an url)
            if not os.path.exists(name):
                # Non-existing dir, cannot look this up.
                return False
            try:
                name = get_package_name(os.path.abspath(name))
            except ValueError:
                # Failed to look up any meaningful name.
                return False

        # Try to look up recipe by name:
        try:
            recipe = Recipe.get_recipe(name, self)
        except ValueError:
            pass
        else:
            name = getattr(recipe, 'site_packages_name', None) or name
        name = name.replace('.', '/')
        site_packages_dir = self.get_site_packages_dir(arch)
        return (exists(join(site_packages_dir, name)) or
                exists(join(site_packages_dir, name + '.py')) or
                exists(join(site_packages_dir, name + '.pyc')) or
                exists(join(site_packages_dir, name + '.pyo')) or
                exists(join(site_packages_dir, name + '.so')) or
                glob.glob(join(site_packages_dir, name + '-*.egg')))
Example #8
def build_recipes(build_order, python_modules, ctx):
    # Put recipes in correct build order
    bs = ctx.bootstrap
    info_notify("Recipe build order is {}".format(build_order))
    if python_modules:
        python_modules = sorted(set(python_modules))
        info_notify(
            ('The requirements ({}) were not found as recipes, they will be '
             'installed with pip.').format(', '.join(python_modules)))

    recipes = [Recipe.get_recipe(name, ctx) for name in build_order]

    # download is arch independent
    info_main('# Downloading recipes ')
    for recipe in recipes:
        recipe.download_if_necessary()

    for arch in ctx.archs:
        info_main('# Building all recipes for arch {}'.format(arch.arch))

        info_main('# Unpacking recipes')
        for recipe in recipes:
            ensure_dir(recipe.get_build_container_dir(arch.arch))
            recipe.prepare_build_dir(arch.arch)

        info_main('# Prebuilding recipes')
        # 2) prebuild packages
        for recipe in recipes:
            info_main('Prebuilding {} for {}'.format(recipe.name, arch.arch))
            recipe.prebuild_arch(arch)
            recipe.apply_patches(arch)

        # 3) build packages
        info_main('# Building recipes')
        for recipe in recipes:
            info_main('Building {} for {}'.format(recipe.name, arch.arch))
            if recipe.should_build(arch):
                recipe.build_arch(arch)
            else:
                info('{} said it is already built, skipping'
                     .format(recipe.name))

        # 4) biglink everything
        # AND: Should make this optional
        info_main('# Biglinking object files')
        if not ctx.python_recipe or not ctx.python_recipe.from_crystax:
            biglink(ctx, arch)
        else:
            info('NDK is crystax, skipping biglink (will this work?)')

        # 5) postbuild packages
        info_main('# Postbuilding recipes')
        for recipe in recipes:
            info_main('Postbuilding {} for {}'.format(recipe.name, arch.arch))
            recipe.postbuild_arch(arch)

    info_main('# Installing pure Python modules')
    run_pymodules_install(ctx, python_modules)

    return
Example #9
 def set_libs_flags(self, env, arch):
     env = super(Python3Recipe, self).set_libs_flags(env, arch)
     if 'openssl' in self.ctx.recipe_build_order:
         recipe = Recipe.get_recipe('openssl', self.ctx)
         self.configure_args += \
             ('--with-openssl=' + recipe.get_build_dir(arch.arch),)
     return env
Example #10
    def build_arch(self, arch):
        super(LibxsltRecipe, self).build_arch(arch)
        env = self.get_recipe_env(arch)
        build_dir = self.get_build_dir(arch.arch)
        with current_directory(build_dir):
            # If the build is done with /bin/sh things blow up,
            # try really hard to use bash
            libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx)
            libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch)
            build_arch = shprint(sh.gcc, '-dumpmachine').stdout.decode(
                'utf-8').split('\n')[0]

            if not exists('configure'):
                shprint(sh.Command('./autogen.sh'), _env=env)
            shprint(sh.Command('autoreconf'), '-vif', _env=env)
            shprint(sh.Command('./configure'),
                    '--build=' + build_arch,
                    '--host=' + arch.command_prefix,
                    '--target=' + arch.command_prefix,
                    '--without-plugins',
                    '--without-debug',
                    '--without-python',
                    '--without-crypto',
                    '--with-libxml-src=' + libxml2_build_dir,
                    '--disable-shared',
                    _env=env)
            shprint(sh.make, "V=1", _env=env)

            shutil.copyfile('libxslt/.libs/libxslt.a',
                            join(self.ctx.libs_dir, 'libxslt.a'))
            shutil.copyfile('libexslt/.libs/libexslt.a',
                            join(self.ctx.libs_dir, 'libexslt.a'))
Example #11
    def get_env(self):
        env = {}

        env["CFLAGS"] = " ".join([
            "-DANDROID", "-mandroid", "-fomit-frame-pointer",
            "--sysroot", self.ctx.ndk_platform])

        env["CXXFLAGS"] = env["CFLAGS"]

        env["LDFLAGS"] = " ".join(['-lm'])

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_prefix = self.ctx.toolchain_prefix
        toolchain_version = self.ctx.toolchain_version
        command_prefix = self.command_prefix

        env['TOOLCHAIN_PREFIX'] = toolchain_prefix
        env['TOOLCHAIN_VERSION'] = toolchain_version

        print('path is', environ['PATH'])
        cc = find_executable('{command_prefix}-gcc'.format(
            command_prefix=command_prefix), path=environ['PATH'])
        if cc is None:
            warning('Couldn\'t find executable for CC. This indicates a '
                    'problem locating the {} executable in the Android '
                    'NDK, not that you don\'t have a normal compiler '
                    'installed. Exiting.')
            exit(1)

        env['CC'] = '{command_prefix}-gcc {cflags}'.format(
            command_prefix=command_prefix,
            cflags=env['CFLAGS'])
        env['CXX'] = '{command_prefix}-g++ {cxxflags}'.format(
            command_prefix=command_prefix,
            cxxflags=env['CXXFLAGS'])

        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j5'
        env['READELF'] = '{}-readelf'.format(command_prefix)

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)

        # AND: This hardcodes python version 2.7, needs fixing
        env['BUILDLIB_PATH'] = join(
            hostpython_recipe.get_build_dir(self.arch),
            'build', 'lib.linux-{}-2.7'.format(uname()[-1]))

        env['PATH'] = environ['PATH']

        env['ARCH'] = self.arch

        return env
Example #12
 def setUp(self):
     """
     Sets up the recipe and context.
     """
     self.context = Context()
     self.context.ndk_api = 21
     self.context.android_api = 27
     self.arch = ArchARMv7_a(self.context)
     self.recipe = Recipe.get_recipe('gevent', self.context)
Example #13
 def test_list_recipes(self):
     """
     Trivial test verifying list_recipes returns a generator with some recipes.
     """
     ctx = Context()
     recipes = Recipe.list_recipes(ctx)
     self.assertTrue(isinstance(recipes, types.GeneratorType))
     recipes = list(recipes)
     self.assertIn('python3', recipes)
Example #14
    def get_recipe_env(self, arch):
        env = super(CryptographyRecipe, self).get_recipe_env(arch)

        openssl_recipe = Recipe.get_recipe('openssl', self.ctx)
        env['CFLAGS'] += openssl_recipe.include_flags(arch)
        env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch)
        env['LIBS'] = openssl_recipe.link_libs_flags()

        return env
Example #15
 def test_recipe_dirs(self):
     """
     Trivial `recipe_dirs()` test.
     Makes sure the list is not empty and has the root directory.
     """
     ctx = Context()
     recipes_dir = Recipe.recipe_dirs(ctx)
     # by default only the root dir `recipes` directory
     self.assertEqual(len(recipes_dir), 1)
     self.assertTrue(recipes_dir[0].startswith(ctx.root_dir))
Example #16
    def conflicts(self, name):
        for name in self.keys():
            try:
                recipe = Recipe.get_recipe(name, self.ctx)
                conflicts = recipe.conflicts
            except IOError:
                conflicts = []

            if any([c in self for c in conflicts]):
                return True
        return False
Example #17
    def conflicts(self):
        for name in self.keys():
            try:
                recipe = Recipe.get_recipe(name, self.ctx)
                conflicts = [dep.lower() for dep in recipe.conflicts]
            except ValueError:
                conflicts = []

            if any([c in self for c in conflicts]):
                return True
        return False
Example #18
 def get_recipe_env(self, arch=None):
     env = super(PyCryptoRecipe, self).get_recipe_env(arch)
     openssl_build_dir = Recipe.get_recipe('openssl', self.ctx).get_build_dir(arch.arch)
     env['CC'] = '%s -I%s' % (env['CC'], join(openssl_build_dir, 'include'))
     # Add each library search path as its own -L flag.
     env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch))
     env['LDFLAGS'] += ' -L{}'.format(self.ctx.libs_dir)
     env['LDFLAGS'] += ' -L{}'.format(openssl_build_dir)
     env['EXTRA_CFLAGS'] = '--host linux-armv'
     env['ac_cv_func_malloc_0_nonnull'] = 'yes'
     return env
Example #19
    def clean_recipe_build(self, args):
        '''Deletes the build files of the given recipe.

        This is intended for debugging purposes; you may experience
        strange behaviour or problems with some recipes (if their
        build has made unexpected state changes). If this happens, run
        clean_builds, or try cleaning other recipes until things
        work again.
        '''
        recipe = Recipe.get_recipe(args.recipe, self.ctx)
        info('Cleaning build for {} recipe.'.format(recipe.name))
        recipe.clean_build()
Example #20
def recursively_collect_orders(name, ctx, all_inputs, orders=[]):
    '''For each possible recipe ordering, try to add the new recipe name
    to that order. Recursively do the same thing with all the
    dependencies of each recipe.

    '''
    try:
        recipe = Recipe.get_recipe(name, ctx)
        if recipe.depends is None:
            dependencies = []
        else:
            # make all dependencies into lists so that product will work
            dependencies = [([dependency] if not isinstance(
                dependency, (list, tuple))
                            else dependency) for dependency in recipe.depends]

        # handle opt_depends: these impose requirements on the build
        # order only if already present in the list of recipes to build
        dependencies.extend([[d] for d in recipe.get_opt_depends_in_list(all_inputs)])

        if recipe.conflicts is None:
            conflicts = []
        else:
            conflicts = recipe.conflicts
    except IOError:
        # The recipe does not exist, so we assume it can be installed
        # via pip with no extra dependencies
        dependencies = []
        conflicts = []

    new_orders = []
    # for each existing recipe order, see if we can add the new recipe name
    for order in orders:
        if name in order:
            new_orders.append(deepcopy(order))
            continue
        if order.conflicts(name):
            continue
        if any([conflict in order for conflict in conflicts]):
            continue

        for dependency_set in product(*dependencies):
            new_order = deepcopy(order)
            new_order[name] = set(dependency_set)

            dependency_new_orders = [new_order]
            for dependency in dependency_set:
                dependency_new_orders = recursively_collect_orders(
                    dependency, ctx, all_inputs, dependency_new_orders)

            new_orders.extend(dependency_new_orders)

    return new_orders
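The loop above relies on itertools.product to turn the per-dependency lists of alternatives into concrete dependency sets. A minimal standalone sketch of just that expansion step, using hypothetical dependency lists and no Context:

from itertools import product

# Each entry is a list of acceptable alternatives for one dependency,
# mirroring the wrapping done in recursively_collect_orders above.
dependencies = [['sdl2'], ['python2', 'python3']]  # hypothetical

for dependency_set in product(*dependencies):
    print(dependency_set)
# ('sdl2', 'python2')
# ('sdl2', 'python3')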
Example #21
    def set_libs_flags(self, env, arch):
        env = super(Python2Recipe, self).set_libs_flags(env, arch)
        if 'libffi' in self.ctx.recipe_build_order:
            # For python2 we need to tell configure that we want to use our
            # compiled libffi; this step is not necessary for python3.
            self.configure_args += ('--with-system-ffi',)

        if 'openssl' in self.ctx.recipe_build_order:
            recipe = Recipe.get_recipe('openssl', self.ctx)
            openssl_build = recipe.get_build_dir(arch.arch)
            env['OPENSSL_BUILD'] = openssl_build
            env['OPENSSL_VERSION'] = recipe.version
        return env
Example #22
    def get_recipe_env(self, arch=None, clang=True):
        env = super(PyCryptoRecipe, self).get_recipe_env(arch)
        openssl_recipe = Recipe.get_recipe('openssl', self.ctx)
        env['CC'] = env['CC'] + openssl_recipe.include_flags(arch)

        env['LDFLAGS'] += ' -L{}'.format(self.ctx.get_libs_dir(arch.arch))
        env['LDFLAGS'] += ' -L{}'.format(self.ctx.libs_dir)
        env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch)
        env['LIBS'] = env.get('LIBS', '') + openssl_recipe.link_libs_flags()

        env['EXTRA_CFLAGS'] = '--host linux-armv'
        env['ac_cv_func_malloc_0_nonnull'] = 'yes'
        return env
Example #23
 def set_libs_flags(self, env, arch):
     '''Takes care to properly link libraries with python depending on our
     requirements and the attribute :attr:`opt_depends`.
     '''
     if 'libffi' in self.ctx.recipe_build_order:
         info('Activating flags for libffi')
         recipe = Recipe.get_recipe('libffi', self.ctx)
         include = ' -I' + ' -I'.join(recipe.get_include_dirs(arch))
         ldflag = ' -L' + join(recipe.get_build_dir(arch.arch),
                               recipe.get_host(arch), '.libs') + ' -lffi'
         env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include
         env['LDFLAGS'] = env.get('LDFLAGS', '') + ldflag
     return env
Example #24
 def has_package(self, name, arch=None):
     try:
         recipe = Recipe.get_recipe(name, self)
     except IOError:
         pass
     else:
         name = getattr(recipe, 'site_packages_name', None) or name
     name = name.replace('.', '/')
     site_packages_dir = self.get_site_packages_dir(arch)
     return (exists(join(site_packages_dir, name)) or
             exists(join(site_packages_dir, name + '.py')) or
             exists(join(site_packages_dir, name + '.pyc')) or
             exists(join(site_packages_dir, name + '.pyo')) or
             exists(join(site_packages_dir, name + '.so')))
Example #25
 def remove_remaining_conflicts(self, ctx):
     # It's unpleasant to have to pass ctx as an argument...
     '''Checks all possible graphs for conflicts that have arisen during
     the addition of alternative recipe branches, as these are not checked
     for conflicts at the time.'''
     new_graphs = []
     for i, graph in enumerate(self.graphs):
         for name in graph.keys():
             recipe = Recipe.get_recipe(name, ctx)
             if any([c in graph for c in recipe.conflicts]):
                 break
         else:
             new_graphs.append(graph)
     self.graphs = new_graphs
Example #26
 def recipes(self, args):
     ctx = self.ctx
     if args.compact:
         print(" ".join(set(Recipe.list_recipes(ctx))))
     else:
         for name in sorted(Recipe.list_recipes(ctx)):
             recipe = Recipe.get_recipe(name, ctx)
             version = str(recipe.version)
             print('{Fore.BLUE}{Style.BRIGHT}{recipe.name:<12} '
                   '{Style.RESET_ALL}{Fore.LIGHTBLUE_EX}'
                   '{version:<8}{Style.RESET_ALL}'.format(
                     recipe=recipe, Fore=Out_Fore, Style=Out_Style,
                     version=version))
             print('    {Fore.GREEN}depends: {recipe.depends}'
                   '{Fore.RESET}'.format(recipe=recipe, Fore=Out_Fore))
             if recipe.conflicts:
                 print('    {Fore.RED}conflicts: {recipe.conflicts}'
                       '{Fore.RESET}'
                       .format(recipe=recipe, Fore=Out_Fore))
             if recipe.opt_depends:
                 print('    {Fore.YELLOW}optional depends: '
                       '{recipe.opt_depends}{Fore.RESET}'
                       .format(recipe=recipe, Fore=Out_Fore))
Example #27
    def clean_recipe_build(self, args):
        '''Deletes the build files of the given recipe.

        This is intended for debugging purposes; you may experience
        strange behaviour or problems with some recipes (if their
        build has made unexpected state changes). If this happens, run
        clean_builds, or try cleaning other recipes until things
        work again.
        '''
        parser = argparse.ArgumentParser(
            description="Delete all build files for the given recipe name.")
        parser.add_argument('recipe', help='The recipe name')
        args = parser.parse_args(args)

        recipe = Recipe.get_recipe(args.recipe, self.ctx)
        info('Cleaning build for {} recipe.'.format(recipe.name))
        recipe.clean_build()
Example #28
 def setUp(self):
     """
     Sets up the recipe and context.
     """
     self.context = Context()
     self.arch = ArchARMv7_a(self.context)
     self.recipe = Recipe.get_recipe('reportlab', self.context)
     self.recipe.ctx = self.context
     self.bootstrap = None
     recipe_build_order, python_modules, bootstrap = \
         get_recipe_order_and_bootstrap(
             self.context, [self.recipe.name], self.bootstrap)
     self.context.recipe_build_order = recipe_build_order
     self.context.python_modules = python_modules
     self.context.setup_dirs(tempfile.gettempdir())
     self.bootstrap = bootstrap
     self.recipe_dir = self.recipe.get_build_dir(self.arch.arch)
     ensure_dir(self.recipe_dir)
Example #29
def biglink(ctx, arch):
    # First, collate object files from each recipe
    info('Collating object files from each recipe')
    obj_dir = join(ctx.bootstrap.build_dir, 'collated_objects')
    ensure_dir(obj_dir)
    recipes = [Recipe.get_recipe(name, ctx) for name in ctx.recipe_build_order]
    for recipe in recipes:
        recipe_obj_dir = join(recipe.get_build_container_dir(arch.arch),
                              'objects_{}'.format(recipe.name))
        if not exists(recipe_obj_dir):
            info('{} recipe has no biglinkable files dir, skipping'
                 .format(recipe.name))
            continue
        files = glob.glob(join(recipe_obj_dir, '*'))
        if not len(files):
            info('{} recipe has no biglinkable files, skipping'
                 .format(recipe.name))
            continue
        info('{} recipe has object files, copying'.format(recipe.name))
        files.append(obj_dir)
        shprint(sh.cp, '-r', *files)

    env = arch.get_env()
    env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format(
        join(ctx.bootstrap.build_dir, 'obj', 'local', arch.arch))

    if not len(glob.glob(join(obj_dir, '*'))):
        info('There seem to be no libraries to biglink, skipping.')
        return
    info('Biglinking')
    info('target {}'.format(join(ctx.get_libs_dir(arch.arch),
                                 'libpymodules.so')))
    do_biglink = copylibs_function if ctx.copy_libs else biglink_function

    # Move to the directory containing crtstart_so.o and crtend_so.o
    # This is necessary with newer NDKs? A gcc bug?
    with current_directory(join(ctx.ndk_platform, 'usr', 'lib')):
        do_biglink(
            join(ctx.get_libs_dir(arch.arch), 'libpymodules.so'),
            obj_dir.split(' '),
            extra_link_dirs=[join(ctx.bootstrap.build_dir,
                                  'obj', 'local', arch.arch),
                             os.path.abspath('.')],
            env=env)
Example #30
    def get_recipe_env(self, arch):
        env = super(LibxsltRecipe, self).get_recipe_env(arch)
        env['CONFIG_SHELL'] = '/bin/bash'
        env['SHELL'] = '/bin/bash'

        libxml2_recipe = Recipe.get_recipe('libxml2', self.ctx)
        libxml2_build_dir = libxml2_recipe.get_build_dir(arch.arch)
        libxml2_libs_dir = join(libxml2_build_dir, '.libs')

        env['CFLAGS'] = ' '.join([
            env['CFLAGS'],
            '-I' + libxml2_build_dir,
            '-I' + join(libxml2_build_dir, 'include', 'libxml'),
            '-I' + self.get_build_dir(arch.arch),
        ])
        env['LDFLAGS'] += ' -L' + libxml2_libs_dir
        env['LIBS'] = '-lxml2 -lz -lm'

        return env
Example #31
def biglink(ctx, arch):
    # First, collate object files from each recipe
    info('Collating object files from each recipe')
    obj_dir = join(ctx.bootstrap.build_dir, 'collated_objects')
    ensure_dir(obj_dir)
    recipes = [Recipe.get_recipe(name, ctx) for name in ctx.recipe_build_order]
    for recipe in recipes:
        recipe_obj_dir = join(recipe.get_build_container_dir(arch.arch),
                              'objects_{}'.format(recipe.name))
        if not exists(recipe_obj_dir):
            info('{} recipe has no biglinkable files dir, skipping'.format(
                recipe.name))
            continue
        files = glob.glob(join(recipe_obj_dir, '*'))
        if not len(files):
            info('{} recipe has no biglinkable files, skipping'.format(
                recipe.name))
            continue
        info('{} recipe has object files, copying'.format(recipe.name))
        files.append(obj_dir)
        shprint(sh.cp, '-r', *files)

    env = arch.get_env()
    env['LDFLAGS'] = env['LDFLAGS'] + ' -L{}'.format(
        join(ctx.bootstrap.build_dir, 'obj', 'local', arch.arch))

    if not len(glob.glob(join(obj_dir, '*'))):
        info('There seem to be no libraries to biglink, skipping.')
        return
    info('Biglinking')
    info('target {}'.format(
        join(ctx.get_libs_dir(arch.arch), 'libpymodules.so')))
    biglink_function(join(ctx.get_libs_dir(arch.arch), 'libpymodules.so'),
                     obj_dir.split(' '),
                     extra_link_dirs=[
                         join(ctx.bootstrap.build_dir, 'obj', 'local',
                              arch.arch)
                     ],
                     env=env)
Example #32
    def has_package(self, name, arch=None):
        # If this is a file path, it'll need special handling:
        if (name.find("/") >= 0 or name.find("\\") >= 0) and \
                name.find("://") < 0:  # (:// would indicate an url)
            if not os.path.exists(name):
                # Non-existing dir, cannot look this up.
                return False
            if os.path.exists(os.path.join(name, "setup.py")):
                # Get name from setup.py:
                name = subprocess.check_output(
                    [sys.executable, "setup.py", "--name"], cwd=name)
                try:
                    name = name.decode('utf-8', 'replace')
                except AttributeError:
                    pass
                name = name.strip()
                if len(name) == 0:
                    # Failed to look up any meaningful name.
                    return False
            else:
                # A folder with whatever, cannot look this up.
                return False

        # Try to look up recipe by name:
        try:
            recipe = Recipe.get_recipe(name, self)
        except IOError:
            pass
        else:
            name = getattr(recipe, 'site_packages_name', None) or name
        name = name.replace('.', '/')
        site_packages_dir = self.get_site_packages_dir(arch)
        return (exists(join(site_packages_dir, name))
                or exists(join(site_packages_dir, name + '.py'))
                or exists(join(site_packages_dir, name + '.pyc'))
                or exists(join(site_packages_dir, name + '.pyo'))
                or exists(join(site_packages_dir, name + '.so'))
                or glob.glob(join(site_packages_dir, name + '-*.egg')))
Example #33
def expand_dependencies(recipes, ctx):
    """ This function expands to lists of all different available
        alternative recipe combinations, with the dependencies added in
        ONLY for all the not-with-alternative recipes.
        (So this is like the deps graph very simplified and incomplete, but
         hopefully good enough for most basic bootstrap compatibility checks)
    """

    # Add in all the deps of recipes where there is no alternative:
    recipes_with_deps = list(recipes)
    for entry in recipes:
        if not isinstance(entry, (tuple, list)) or len(entry) == 1:
            if isinstance(entry, (tuple, list)):
                entry = entry[0]
            try:
                recipe = Recipe.get_recipe(entry, ctx)
                recipes_with_deps += recipe.depends
            except ValueError:
                # it's a pure python package without a recipe, so we
                # don't know the dependencies...skipping for now
                pass

    # Split up lists by available alternatives:
    recipe_lists = [[]]
    for recipe in recipes_with_deps:
        if isinstance(recipe, (tuple, list)):
            new_recipe_lists = []
            for alternative in recipe:
                for old_list in recipe_lists:
                    new_list = [i for i in old_list]
                    new_list.append(alternative)
                    new_recipe_lists.append(new_list)
            recipe_lists = new_recipe_lists
        else:
            for existing_list in recipe_lists:
                existing_list.append(recipe)
    return recipe_lists
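The second half of expand_dependencies is what forks the list on alternatives. A minimal standalone sketch of that step with hypothetical recipe names, just to show the shape of the result:

# Hypothetical flattened input: one alternative tuple plus a plain recipe.
recipes_with_deps = [('libffi', 'openssl'), 'sqlite3']

recipe_lists = [[]]
for recipe in recipes_with_deps:
    if isinstance(recipe, (tuple, list)):
        # Fork every existing list once per alternative.
        recipe_lists = [old_list + [alternative]
                        for alternative in recipe
                        for old_list in recipe_lists]
    else:
        for existing_list in recipe_lists:
            existing_list.append(recipe)

print(recipe_lists)  # [['libffi', 'sqlite3'], ['openssl', 'sqlite3']]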
Example #34
    def build_cython_components(self, arch):
        libzmq_recipe = Recipe.get_recipe('libzmq', self.ctx)
        libzmq_prefix = join(libzmq_recipe.get_build_dir(arch.arch), "install")
        self.setup_extra_args = ["--zmq={}".format(libzmq_prefix)]
        self.build_cmd = "configure"

        env = self.get_recipe_env(arch)
        setup_cfg = join(self.get_build_dir(arch.arch), "setup.cfg")
        with open(setup_cfg, "wb") as fd:
            fd.write("""
[global]
zmq_prefix = {}
skip_check_zmq = True
""".format(libzmq_prefix).encode())

        return super().build_cython_components(arch)

        # NOTE: everything below is unreachable because of the `return`
        # above; it is left over from an older build path.
        with current_directory(self.get_build_dir(arch.arch)):
            hostpython = sh.Command(self.hostpython_location)
            shprint(hostpython, 'setup.py', 'configure', '-v', _env=env)
            shprint(hostpython, 'setup.py', 'build_ext', '-v', _env=env)
            build_dir = glob.glob('build/lib.*')[0]
            shprint(sh.find, build_dir, '-name', '"*.o"', '-exec',
                    env['STRIP'], '{}', ';', _env=env)
Example #35
    def test_get_recipe_env(
        self,
        mock_find_executable,
        mock_glob,
        mock_ensure_dir,
        mock_check_recipe_choices,
    ):
        """
        Test that method
        :meth:`~pythonforandroid.recipes.pyicu.PyICURecipe.get_recipe_env`
        returns the expected flags
        """
        icu_recipe = Recipe.get_recipe("icu", self.ctx)

        mock_find_executable.return_value = (
            "/opt/android/android-ndk/toolchains/"
            "llvm/prebuilt/linux-x86_64/bin/clang"
        )
        mock_glob.return_value = ["llvm"]
        mock_check_recipe_choices.return_value = sorted(
            self.ctx.recipe_build_order
        )

        expected_pyicu_libs = [
            lib[3:-3] for lib in icu_recipe.built_libraries.keys()
        ]
        env = self.recipe.get_recipe_env(self.arch)
        self.assertEqual(":".join(expected_pyicu_libs), env["PYICU_LIBRARIES"])
        self.assertIn("include/icu", env["CPPFLAGS"])
        self.assertIn("icu4c/icu_build/lib", env["LDFLAGS"])

        # make sure that the mocked methods are actually called
        mock_glob.assert_called()
        mock_ensure_dir.assert_called()
        mock_find_executable.assert_called()
        mock_check_recipe_choices.assert_called()
Example #36
def recursively_collect_orders(name,
                               ctx,
                               all_inputs,
                               orders=None,
                               blacklist=None):
    '''For each possible recipe ordering, try to add the new recipe name
    to that order. Recursively do the same thing with all the
    dependencies of each recipe.

    '''
    name = name.lower()
    if orders is None:
        orders = []
    if blacklist is None:
        blacklist = set()
    try:
        recipe = Recipe.get_recipe(name, ctx)
        dependencies = get_dependency_tuple_list_for_recipe(
            recipe, blacklist=blacklist)

        # handle opt_depends: these impose requirements on the build
        # order only if already present in the list of recipes to build
        dependencies.extend(
            fix_deplist([[d]
                         for d in recipe.get_opt_depends_in_list(all_inputs)
                         if d.lower() not in blacklist]))

        if recipe.conflicts is None:
            conflicts = []
        else:
            conflicts = [dep.lower() for dep in recipe.conflicts]
    except ValueError:
        # The recipe does not exist, so we assume it can be installed
        # via pip with no extra dependencies
        dependencies = []
        conflicts = []

    new_orders = []
    # for each existing recipe order, see if we can add the new recipe name
    for order in orders:
        if name in order:
            new_orders.append(deepcopy(order))
            continue
        if order.conflicts():
            continue
        if any([conflict in order for conflict in conflicts]):
            continue

        for dependency_set in product(*dependencies):
            new_order = deepcopy(order)
            new_order[name] = set(dependency_set)

            dependency_new_orders = [new_order]
            for dependency in dependency_set:
                dependency_new_orders = recursively_collect_orders(
                    dependency,
                    ctx,
                    all_inputs,
                    dependency_new_orders,
                    blacklist=blacklist)

            new_orders.extend(dependency_new_orders)

    return new_orders
Example #37
    def set_libs_flags(self, env, arch):
        """Takes care to properly link libraries with python depending on our
        requirements and the attribute :attr:`opt_depends`.
        """
        def add_flags(include_flags, link_dirs, link_libs):
            env["CPPFLAGS"] = env.get("CPPFLAGS", "") + include_flags
            env["LDFLAGS"] = env.get("LDFLAGS", "") + link_dirs
            env["LIBS"] = env.get("LIBS", "") + link_libs

        if "sqlite3" in self.ctx.recipe_build_order:
            info("Activating flags for sqlite3")
            recipe = Recipe.get_recipe("sqlite3", self.ctx)
            add_flags(
                " -I" + recipe.get_build_dir(arch.arch),
                " -L" + recipe.get_lib_dir(arch),
                " -lsqlite3",
            )

        if "libffi" in self.ctx.recipe_build_order:
            info("Activating flags for libffi")
            recipe = Recipe.get_recipe("libffi", self.ctx)
            # To force the correct linkage against our libffi library, we
            # point the following variable at the directory containing our
            # libffi.pc file, because the python build system uses pkg-config
            # to configure it.
            env["PKG_CONFIG_PATH"] = recipe.get_build_dir(arch.arch)
            add_flags(
                " -I" + " -I".join(recipe.get_include_dirs(arch)),
                " -L" + join(recipe.get_build_dir(arch.arch), ".libs"),
                " -lffi",
            )

        if "openssl" in self.ctx.recipe_build_order:
            info("Activating flags for openssl")
            recipe = Recipe.get_recipe("openssl", self.ctx)
            self.configure_args += ("--with-openssl=" +
                                    recipe.get_build_dir(arch.arch), )
            add_flags(
                recipe.include_flags(arch),
                recipe.link_dirs_flags(arch),
                recipe.link_libs_flags(),
            )

        for library_name in {"libbz2", "liblzma"}:
            if library_name in self.ctx.recipe_build_order:
                info(f"Activating flags for {library_name}")
                recipe = Recipe.get_recipe(library_name, self.ctx)
                add_flags(
                    recipe.get_library_includes(arch),
                    recipe.get_library_ldflags(arch),
                    recipe.get_library_libs_flag(),
                )

        # The python build system contains a hardcoded zlib version, which
        # prevents the zlib module from building. Here we look up android's
        # zlib version and set the right flags so python can be built against
        # android's zlib.
        info("Activating flags for android's zlib")
        zlib_lib_path = join(self.ctx.ndk_platform, "usr", "lib")
        zlib_includes = join(self.ctx.ndk_dir, "sysroot", "usr", "include")
        zlib_h = join(zlib_includes, "zlib.h")
        try:
            with open(zlib_h) as fileh:
                zlib_data = fileh.read()
        except IOError:
            raise BuildInterruptingException(
                "Could not determine android's zlib version, no zlib.h ({}) in"
                " the NDK dir includes".format(zlib_h))
        for line in zlib_data.split("\n"):
            if line.startswith("#define ZLIB_VERSION "):
                break
        else:
            raise BuildInterruptingException(
                "Could not parse zlib.h...so we cannot find zlib version,"
                "required by python build,")
        env["ZLIB_VERSION"] = line.replace("#define ZLIB_VERSION ", "")
        add_flags(" -I" + zlib_includes, " -L" + zlib_lib_path, " -lz")

        return env
Example #38
    def get_env(self):
        env = {}

        env["CFLAGS"] = " ".join([
            "-DANDROID", "-mandroid", "-fomit-frame-pointer", "--sysroot",
            self.ctx.ndk_platform
        ])

        env["CXXFLAGS"] = env["CFLAGS"]

        env["LDFLAGS"] = " ".join(['-lm'])

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_prefix = self.ctx.toolchain_prefix
        toolchain_version = self.ctx.toolchain_version
        command_prefix = self.command_prefix

        env['TOOLCHAIN_PREFIX'] = toolchain_prefix
        env['TOOLCHAIN_VERSION'] = toolchain_version

        ccache = ''
        if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))):
            print('ccache found, will optimize builds')
            ccache = self.ctx.ccache + ' '
            env['USE_CCACHE'] = '1'
            env['NDK_CCACHE'] = self.ctx.ccache

        print('path is', environ['PATH'])
        cc = find_executable(
            '{command_prefix}-gcc'.format(command_prefix=command_prefix),
            path=environ['PATH'])
        if cc is None:
            warning('Couldn\'t find executable for CC. This indicates a '
                    'problem locating the {} executable in the Android '
                    'NDK, not that you don\'t have a normal compiler '
                    'installed. Exiting.')
            exit(1)

        env['CC'] = '{ccache}{command_prefix}-gcc {cflags}'.format(
            command_prefix=command_prefix, ccache=ccache, cflags=env['CFLAGS'])
        env['CXX'] = '{ccache}{command_prefix}-g++ {cxxflags}'.format(
            command_prefix=command_prefix,
            ccache=ccache,
            cxxflags=env['CXXFLAGS'])

        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j5'
        env['READELF'] = '{}-readelf'.format(command_prefix)

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)

        # AND: This hardcodes python version 2.7, needs fixing
        env['BUILDLIB_PATH'] = join(hostpython_recipe.get_build_dir(self.arch),
                                    'build',
                                    'lib.linux-{}-2.7'.format(uname()[-1]))

        env['PATH'] = environ['PATH']

        env['ARCH'] = self.arch

        return env
Example #39
def obvious_conflict_checker(ctx, name_tuples, blacklist=None):
    """ This is a pre-flight check function that will completely ignore
        recipe order or choosing an actual value in any of the multiple
        choice tuples/dependencies, and just do a very basic obvious
        conflict check.
    """
    deps_were_added_by = dict()
    deps = set()
    if blacklist is None:
        blacklist = set()

    # Add dependencies for all recipes:
    to_be_added = [(name_tuple, None) for name_tuple in name_tuples]
    while len(to_be_added) > 0:
        current_to_be_added = list(to_be_added)
        to_be_added = []
        for (added_tuple, adding_recipe) in current_to_be_added:
            assert (type(added_tuple) == tuple)
            if len(added_tuple) > 1:
                # No obvious commitment in what to add, don't check it itself
                # but throw it into deps for later comparing against
                # (Remember this function only catches obvious issues)
                deps.add(added_tuple)
                continue

            name = added_tuple[0]
            recipe_conflicts = set()
            recipe_dependencies = []
            try:
                # Get recipe to add and who's ultimately adding it:
                recipe = Recipe.get_recipe(name, ctx)
                recipe_conflicts = {c.lower() for c in recipe.conflicts}
                recipe_dependencies = get_dependency_tuple_list_for_recipe(
                    recipe, blacklist=blacklist)
            except ValueError:
                pass
            adder_first_recipe_name = adding_recipe or name

            # Collect the conflicts:
            triggered_conflicts = []
            for dep_tuple_list in deps:
                # See if the new deps conflict with things added before:
                if set(dep_tuple_list).intersection(recipe_conflicts) == set(
                        dep_tuple_list):
                    triggered_conflicts.append(dep_tuple_list)
                    continue

                # See if what was added before conflicts with the new deps:
                if len(dep_tuple_list) > 1:
                    # Not an obvious commitment to a specific recipe/dep
                    # to be added, so we won't check.
                    # (remember this function only catches obvious issues)
                    continue
                try:
                    dep_recipe = Recipe.get_recipe(dep_tuple_list[0], ctx)
                except ValueError:
                    continue
                conflicts = [c.lower() for c in dep_recipe.conflicts]
                if name in conflicts:
                    triggered_conflicts.append(dep_tuple_list)

            # Throw error on conflict:
            if triggered_conflicts:
                # Get first conflict and see who added that one:
                adder_second_recipe_name = "'||'".join(triggered_conflicts[0])
                second_recipe_original_adder = deps_were_added_by.get(
                    (adder_second_recipe_name, ), None)
                if second_recipe_original_adder:
                    adder_second_recipe_name = second_recipe_original_adder

                # Prompt error:
                raise BuildInterruptingException(
                    "Conflict detected: '{}'"
                    " inducing dependencies {}, and '{}'"
                    " inducing conflicting dependencies {}".format(
                        adder_first_recipe_name, (recipe.name, ),
                        adder_second_recipe_name, triggered_conflicts[0]))

            # Actually add it to our list:
            deps.add(added_tuple)
            deps_were_added_by[added_tuple] = adding_recipe

            # Schedule dependencies to be added
            to_be_added += [(dep, adder_first_recipe_name or name)
                            for dep in recipe_dependencies if dep not in deps]
    # If we came here, then there were no obvious conflicts.
    return None
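The subtle part above is the intersection check: a previously added multi-choice tuple only counts as conflicting if every one of its alternatives is in the new recipe's conflict set. A tiny standalone sketch of that set logic, with hypothetical names:

recipe_conflicts = {'python2'}      # hypothetical conflict set of the new recipe
dep_tuple_list = ('python2',)       # a single-choice dependency added earlier
conflicted = set(dep_tuple_list).intersection(recipe_conflicts) == set(dep_tuple_list)
print(conflicted)  # True: every alternative in the tuple is conflicted

dep_tuple_list = ('python2', 'python3')
conflicted = set(dep_tuple_list).intersection(recipe_conflicts) == set(dep_tuple_list)
print(conflicted)  # False: 'python3' is still an acceptable alternative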
Example #40
def get_recipe_order_and_bootstrap(ctx, names, bs=None):
    recipes_to_load = set(names)
    if bs is not None and bs.recipe_depends:
        recipes_to_load = recipes_to_load.union(set(bs.recipe_depends))

    possible_orders = []

    # get all possible order graphs, as names may include tuples/lists
    # of alternative dependencies
    names = [([name] if not isinstance(name, (list, tuple)) else name)
             for name in names]
    for name_set in product(*names):
        new_possible_orders = [RecipeOrder(ctx)]
        for name in name_set:
            new_possible_orders = recursively_collect_orders(
                name, ctx, orders=new_possible_orders)
        possible_orders.extend(new_possible_orders)

    # turn each order graph into a linear list if possible
    orders = []
    for possible_order in possible_orders:
        try:
            order = find_order(possible_order)
        except ValueError:  # a circular dependency was found
            info('Circular dependency found in graph {}, skipping it.'.format(
                possible_order))
            continue
        except:
            warning('Failed to import recipe named {}; the recipe exists '
                    'but appears broken.'.format(name))
            warning('Exception was:')
            raise
        orders.append(list(order))

    # prefer python2 and SDL2 if available
    orders = sorted(orders,
                    key=lambda order: -('python2' in order) -
                    ('sdl2' in order))

    if not orders:
        raise BuildInterruptingException(
            'Didn\'t find any valid dependency graphs. This means that some of your '
            'requirements pull in conflicting dependencies.')

    # It would be better to check against possible orders other
    # than the first one, but in practice clashes will be rare,
    # and can be resolved by specifying more parameters
    chosen_order = orders[0]
    if len(orders) > 1:
        info('Found multiple valid dependency orders:')
        for order in orders:
            info('    {}'.format(order))
        info('Using the first of these: {}'.format(chosen_order))
    else:
        info('Found a single valid recipe set: {}'.format(chosen_order))

    if bs is None:
        bs = Bootstrap.get_bootstrap_from_recipes(chosen_order, ctx)
        recipes, python_modules, bs = get_recipe_order_and_bootstrap(
            ctx, chosen_order, bs=bs)
    else:
        # check if each requirement has a recipe
        recipes = []
        python_modules = []
        for name in chosen_order:
            try:
                recipe = Recipe.get_recipe(name, ctx)
                python_modules += recipe.python_depends
            except IOError:
                python_modules.append(name)
            else:
                recipes.append(name)

    python_modules = list(set(python_modules))
    return recipes, python_modules, bs
Example #41
    def do_python_build(self, arch):
        shprint(sh.cp, self.ctx.hostpython, self.get_build_dir(arch.arch))
        shprint(sh.cp, self.ctx.hostpgen,
                join(self.get_build_dir(arch.arch), 'Parser'))
        hostpython = join(self.get_build_dir(arch.arch), 'hostpython')
        # The pgen binary was copied into the 'Parser' subdirectory above.
        hostpgen = join(self.get_build_dir(arch.arch), 'Parser', 'hostpgen')

        with current_directory(self.get_build_dir(arch.arch)):
            hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
            shprint(sh.cp, join(hostpython_recipe.get_recipe_dir(), 'Setup'),
                    'Modules')
            env = arch.get_env()

            # AND: Should probably move these to get_recipe_env for
            # neatness, but the whole recipe needs tidying along these
            # lines
            env['HOSTARCH'] = 'arm-linux-androideabi'
            env['BUILDARCH'] = shprint(
                sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
            env['CFLAGS'] = ' '.join([env['CFLAGS'], '-DNO_MALLINFO'])
            env['LDFLAGS'] += ' -Wl,--allow-multiple-definition'

            # TODO need to add a should_build that checks if optional
            # dependencies have changed (possibly in a generic way)
            if 'openssl' in self.ctx.recipe_build_order:
                r = Recipe.get_recipe('openssl', self.ctx)
                openssl_build_dir = r.get_build_dir(arch.arch)
                setuplocal = join('Modules', 'Setup.local')
                shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'),
                        setuplocal)
                #shprint(sh.cat, join(self.get_recipe_dir(), 'Setup.local-ssl'), '>>', setuplocal)
                shprint(sh.sed, '-i.backup',
                        's#^SSL=.*#SSL={}#'.format(openssl_build_dir),
                        setuplocal)
                env['OPENSSL_VERSION'] = r.version
                env['CFLAGS'] += ' -I%s' % join(openssl_build_dir, 'include')
                env['LDFLAGS'] += ' -L%s' % openssl_build_dir

            if 'sqlite3' in self.ctx.recipe_build_order:
                # Include sqlite3 in python2 build
                r = Recipe.get_recipe('sqlite3', self.ctx)
                i = ' -I' + r.get_build_dir(arch.arch)
                l = ' -L' + r.get_lib_dir(arch) + ' -lsqlite3'
                # Insert or append to env
                f = 'CPPFLAGS'
                env[f] = env[f] + i if f in env else i
                f = 'LDFLAGS'
                env[f] = env[f] + l if f in env else l

            with open('config.site', 'w') as fileh:
                fileh.write('''
    ac_cv_file__dev_ptmx=no
    ac_cv_file__dev_ptc=no
    ac_cv_have_long_long_format=yes
                ''')

            configure = sh.Command('./configure')
            # AND: OFLAG isn't actually set, should it be?
            shprint(configure,
                    'CROSS_COMPILE_TARGET=yes',
                    'CONFIG_SITE=config.site',
                    '--host={}'.format(env['HOSTARCH']),
                    '--build={}'.format(env['BUILDARCH']),
                    '--prefix={}'.format(realpath('./python-install')),
                    '--enable-shared',
                    '--enable-ipv6',
                    '--disable-toolbox-glue',
                    '--disable-framework',
                    '--with-system-ffi',
                    _env=env)

            # AND: tito left this comment in the original source. It's still true!
            # FIXME, the first time, we got a error at:
            # python$EXE ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
            # /home/tito/code/python-for-android/build/python/Python-2.7.2/python: 1: Syntax error: word unexpected (expecting ")")
            # because at this time, python is arm, not x86. even that, why /usr/include/netinet/in.h is used ?
            # check if we can avoid this part.

            # Hardcoded, remove -I/usr/include/x86_64-linux-gnu from CCSHARED and CFLAGS
            # to prevent overriding android arm sysroot includes
            make = sh.Command(env['MAKE'].split(' ')[0])
            print('First install (expected to fail)...')
            try:
                shprint(make,
                        '-j5',
                        'install',
                        'HOSTPYTHON={}'.format(hostpython),
                        'HOSTPGEN={}'.format(hostpgen),
                        'CROSS_COMPILE_TARGET=yes',
                        'INSTSONAME=libpython2.7.so',
                        _env=env)
            except sh.ErrorReturnCode_2:
                print(
                    'First python2 make failed. This is expected, trying again.'
                )

            print('Make compile...')
            shprint(make,
                    '-j5',
                    'HOSTPYTHON={}'.format(hostpython),
                    'HOSTPGEN={}'.format(hostpgen),
                    'CROSS_COMPILE_TARGET=yes',
                    'INSTSONAME=libpython2.7.so',
                    _env=env)

            print('Second install (expected to work)')
            shprint(sh.touch, 'python.exe', 'python')
            # -k added to keep going (need to figure out the reason for make: *** [libinstall] Error 1)
            shprint(make,
                    '-j5',
                    'install',
                    'HOSTPYTHON={}'.format(hostpython),
                    'HOSTPGEN={}'.format(hostpgen),
                    'CROSS_COMPILE_TARGET=yes',
                    'INSTSONAME=libpython2.7.so',
                    _env=env)

            if is_darwin():
                shprint(sh.cp,
                        join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'Lib'))
                shprint(sh.cp,
                        join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'lib', 'python2.7'))

            # reduce python
            for dir_name in ('test', join('json', 'tests'), 'lib-tk',
                             join('sqlite3', 'test'), join('unittest', 'test'),
                             join('lib2to3', 'tests'), join('bsddb', 'tests'),
                             join('distutils', 'tests'), join('email', 'test'),
                             'curses'):
                shprint(sh.rm, '-rf',
                        join('python-install', 'lib', 'python2.7', dir_name))
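The "insert or append to env" dance on CPPFLAGS/LDFLAGS above recurs in several of these recipes. A minimal sketch of a helper that captures the same behavior; the helper name and paths are hypothetical and not part of python-for-android:

def append_env_flag(env, key, value):
    # Hypothetical helper: append value to env[key], creating the key if absent.
    env[key] = env[key] + value if key in env else value

env = {}
append_env_flag(env, 'CPPFLAGS', ' -I/path/to/sqlite3')  # placeholder paths
append_env_flag(env, 'LDFLAGS', ' -L/path/to/sqlite3 -lsqlite3')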
Example No. 42
0
def get_recipe_order_and_bootstrap(ctx, names, bs=None, blacklist=None):
    # Get set of recipe/dependency names, clean up and add bootstrap deps:
    names = set(names)
    if bs is not None and bs.recipe_depends:
        names = names.union(set(bs.recipe_depends))
    names = fix_deplist([([name] if not isinstance(name,
                                                   (list, tuple)) else name)
                         for name in names])
    if blacklist is None:
        blacklist = set()
    blacklist = {bitem.lower() for bitem in blacklist}

    # Remove all values that are in the blacklist:
    names_before_blacklist = list(names)
    names = []
    for name in names_before_blacklist:
        cleaned_up_tuple = tuple(
            [item for item in name if item not in blacklist])
        if cleaned_up_tuple:
            names.append(cleaned_up_tuple)

    # Do check for obvious conflicts (that would trigger in any order, and
    # without committing to any specific choice in a multi-choice tuple of
    # dependencies):
    obvious_conflict_checker(ctx, names, blacklist=blacklist)
    # If we get here, no obvious conflicts!

    # get all possible order graphs, as names may include tuples/lists
    # of alternative dependencies
    possible_orders = []
    for name_set in product(*names):
        new_possible_orders = [RecipeOrder(ctx)]
        for name in name_set:
            new_possible_orders = recursively_collect_orders(
                name,
                ctx,
                name_set,
                orders=new_possible_orders,
                blacklist=blacklist)
        possible_orders.extend(new_possible_orders)

    # turn each order graph into a linear list if possible
    orders = []
    for possible_order in possible_orders:
        try:
            order = find_order(possible_order)
        except ValueError:  # a circular dependency was found
            info('Circular dependency found in graph {}, skipping it.'.format(
                possible_order))
            continue
        orders.append(list(order))

    # prefer python3 and SDL2 if available
    orders = sorted(orders,
                    key=lambda order: -('python3' in order) -
                    ('sdl2' in order))

    if not orders:
        raise BuildInterruptingException(
            'Didn\'t find any valid dependency graphs. '
            'This means that some of your '
            'requirements pull in conflicting dependencies.')

    # It would be better to check against possible orders other
    # than the first one, but in practice clashes will be rare,
    # and can be resolved by specifying more parameters
    chosen_order = orders[0]
    if len(orders) > 1:
        info('Found multiple valid dependency orders:')
        for order in orders:
            info('    {}'.format(order))
        info('Using the first of these: {}'.format(chosen_order))
    else:
        info('Found a single valid recipe set: {}'.format(chosen_order))

    if bs is None:
        bs = Bootstrap.get_bootstrap_from_recipes(chosen_order, ctx)
        if bs is None:
            # Note: don't remove this without thought, causes infinite loop
            raise BuildInterruptingException(
                "Could not find any compatible bootstrap!")
        recipes, python_modules, bs = get_recipe_order_and_bootstrap(
            ctx, chosen_order, bs=bs, blacklist=blacklist)
    else:
        # check if each requirement has a recipe
        recipes = []
        python_modules = []
        for name in chosen_order:
            try:
                recipe = Recipe.get_recipe(name, ctx)
                python_modules += recipe.python_depends
            except ValueError:
                python_modules.append(name)
            else:
                recipes.append(name)

    python_modules = list(set(python_modules))
    return recipes, python_modules, bs
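As a usage sketch (not taken from the project's own code), a caller with a configured Context might resolve its requirements like this; 'kivy' and 'requests' stand in for whatever the app actually needs:

recipes, python_modules, bs = get_recipe_order_and_bootstrap(
    ctx, ['kivy', 'requests'])
print(recipes)         # requirement names that have recipes, in dependency order
print(python_modules)  # requirement names that will be installed with pip instead
print(bs.name)         # the bootstrap chosen to match the recipes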
Example No. 43
0
    def __init__(self):

        argv = sys.argv
        self.warn_on_carriage_return_args(argv)
        # Buildozer used to pass these arguments in a now-invalid order
        # If that happens, apply this fix
        # This fix will be removed once a fixed buildozer is released
        if (len(argv) > 2
                and argv[1].startswith('--color')
                and argv[2].startswith('--storage-dir')):
            argv.append(argv.pop(1))  # the --color arg
            argv.append(argv.pop(1))  # the --storage-dir arg

        parser = NoAbbrevParser(
            description='A packaging tool for turning Python scripts and apps '
                        'into Android APKs')

        generic_parser = argparse.ArgumentParser(
            add_help=False,
            description='Generic arguments applied to all commands')
        argparse.ArgumentParser(
            add_help=False, description='Arguments for dist building')

        generic_parser.add_argument(
            '--debug', dest='debug', action='store_true', default=False,
            help='Display debug output and all build info')
        generic_parser.add_argument(
            '--color', dest='color', choices=['always', 'never', 'auto'],
            help='Enable or disable color output (default enabled on tty)')
        generic_parser.add_argument(
            '--sdk-dir', '--sdk_dir', dest='sdk_dir', default='',
            help='The filepath where the Android SDK is installed')
        generic_parser.add_argument(
            '--ndk-dir', '--ndk_dir', dest='ndk_dir', default='',
            help='The filepath where the Android NDK is installed')
        generic_parser.add_argument(
            '--android-api',
            '--android_api',
            dest='android_api',
            default=0,
            type=int,
            help=('The Android API level to build against; defaults to {} if '
                  'not specified.').format(RECOMMENDED_TARGET_API))
        generic_parser.add_argument(
            '--ndk-version', '--ndk_version', dest='ndk_version', default=None,
            help=('DEPRECATED: the NDK version is now found automatically or '
                  'not at all.'))
        generic_parser.add_argument(
            '--ndk-api', type=int, default=None,
            help=('The Android API level to compile against. This should be your '
                  '*minimal supported* API, not normally the same as your --android-api. '
                  'Defaults to min(ANDROID_API, {}) if not specified.').format(RECOMMENDED_NDK_API))
        generic_parser.add_argument(
            '--symlink-java-src', '--symlink_java_src',
            action='store_true',
            dest='symlink_java_src',
            default=False,
            help=('If True, symlinks the java src folder during build and dist '
                  'creation. This is useful for development only; it can also '
                  'cause unexpected problems.'))

        default_storage_dir = user_data_dir('python-for-android')
        if ' ' in default_storage_dir:
            default_storage_dir = '~/.python-for-android'
        generic_parser.add_argument(
            '--storage-dir', dest='storage_dir', default=default_storage_dir,
            help=('Primary storage directory for downloads and builds '
                  '(default: {})'.format(default_storage_dir)))

        generic_parser.add_argument(
            '--arch', help='The arch to build for.',
            default='armeabi-v7a')

        # Options for specifying the Distribution
        generic_parser.add_argument(
            '--dist-name', '--dist_name',
            help='The name of the distribution to use or create', default='')

        generic_parser.add_argument(
            '--requirements',
            help=('Dependencies of your app, should be recipe names or '
                  'Python modules. NOT NECESSARY if you are using '
                  'Python 3 with --use-setup-py'),
            default='')

        generic_parser.add_argument(
            '--recipe-blacklist',
            help=('Blacklist an internal recipe from use. Allows '
                  'disabling Python 3 core modules to save size'),
            dest="recipe_blacklist",
            default='')

        generic_parser.add_argument(
            '--blacklist-requirements',
            help=('Blacklist an internal recipe from use. Allows '
                  'disabling Python 3 core modules to save size'),
            dest="blacklist_requirements",
            default='')

        generic_parser.add_argument(
            '--bootstrap',
            help='The bootstrap to build with. Leave unset to choose '
                 'automatically.',
            default=None)

        generic_parser.add_argument(
            '--hook',
            help='Filename to a module that contains python-for-android hooks',
            default=None)

        add_boolean_option(
            generic_parser, ["force-build"],
            default=False,
            description='Whether to force compilation of a new distribution')

        add_boolean_option(
            generic_parser, ["require-perfect-match"],
            default=False,
            description=('Whether the dist recipes must perfectly match '
                         'those requested'))

        add_boolean_option(
            generic_parser, ["allow-replace-dist"],
            default=True,
            description='Whether existing dist names can be automatically replaced'
            )

        generic_parser.add_argument(
            '--local-recipes', '--local_recipes',
            dest='local_recipes', default='./p4a-recipes',
            help='Directory to look for local recipes')

        generic_parser.add_argument(
            '--java-build-tool',
            dest='java_build_tool', default='auto',
            choices=['auto', 'ant', 'gradle'],
            help=('The java build tool to use when packaging the APK, defaults '
                  'to automatically selecting an appropriate tool.'))

        add_boolean_option(
            generic_parser, ['copy-libs'],
            default=False,
            description='Copy libraries instead of using biglink (Android 4.3+)'
        )

        self._read_configuration()

        subparsers = parser.add_subparsers(dest='subparser_name',
                                           help='The command to run')

        def add_parser(subparsers, *args, **kwargs):
            """
            argparse in python2 doesn't support the aliases option,
            so we just don't provide the aliases there.
            """
            if 'aliases' in kwargs and sys.version_info.major < 3:
                kwargs.pop('aliases')
            return subparsers.add_parser(*args, **kwargs)

        add_parser(
            subparsers,
            'recommendations',
            parents=[generic_parser],
            help='List recommended p4a dependencies')
        parser_recipes = add_parser(
            subparsers,
            'recipes',
            parents=[generic_parser],
            help='List the available recipes')
        parser_recipes.add_argument(
            "--compact",
            action="store_true", default=False,
            help="Produce a compact list suitable for scripting")
        add_parser(
            subparsers, 'bootstraps',
            help='List the available bootstraps',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_all',
            aliases=['clean-all'],
            help='Delete all builds, dists and caches',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_dists',
            aliases=['clean-dists'],
            help='Delete all dists',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_bootstrap_builds',
            aliases=['clean-bootstrap-builds'],
            help='Delete all bootstrap builds',
            parents=[generic_parser])
        add_parser(
            subparsers, 'clean_builds',
            aliases=['clean-builds'],
            help='Delete all builds',
            parents=[generic_parser])

        parser_clean = add_parser(
            subparsers, 'clean',
            help='Delete build components.',
            parents=[generic_parser])
        parser_clean.add_argument(
            'component', nargs='+',
            help=('The build component(s) to delete. You can pass any '
                  'number of arguments from "all", "builds", "dists", '
                  '"distributions", "bootstrap_builds", "downloads".'))

        parser_clean_recipe_build = add_parser(
            subparsers,
            'clean_recipe_build', aliases=['clean-recipe-build'],
            help=('Delete the build components of the given recipe. '
                  'By default this will also delete built dists'),
            parents=[generic_parser])
        parser_clean_recipe_build.add_argument(
            'recipe', help='The recipe name')
        parser_clean_recipe_build.add_argument(
            '--no-clean-dists', default=False,
            dest='no_clean_dists',
            action='store_true',
            help='If passed, do not delete existing dists')

        parser_clean_download_cache = add_parser(
            subparsers,
            'clean_download_cache', aliases=['clean-download-cache'],
            help='Delete cached downloads for requirement builds',
            parents=[generic_parser])
        parser_clean_download_cache.add_argument(
            'recipes',
            nargs='*',
            help='The recipes to clean (space-separated). If no recipe name is'
                  ' provided, the entire cache is cleared.')

        parser_export_dist = add_parser(
            subparsers,
            'export_dist', aliases=['export-dist'],
            help='Copy the named dist to the given path',
            parents=[generic_parser])
        parser_export_dist.add_argument('output_dir',
                                        help='The output dir to copy to')
        parser_export_dist.add_argument(
            '--symlink',
            action='store_true',
            help='Symlink the dist instead of copying')

        parser_apk = add_parser(
            subparsers,
            'apk', help='Build an APK',
            parents=[generic_parser])
        # This is actually an internal argument of the build.py
        # (see pythonforandroid/bootstraps/common/build/build.py).
        # However, it is also needed before the distribution is finally
        # assembled for locating the setup.py / other build systems, which
        # is why we also add it here:
        parser_apk.add_argument(
            '--private', dest='private',
            help='the directory with the app source code files' +
                 ' (containing your main.py entrypoint)',
            required=False, default=None)
        parser_apk.add_argument(
            '--release', dest='build_mode', action='store_const',
            const='release', default='debug',
            help='Build the APK in Release mode')
        parser_apk.add_argument(
            '--use-setup-py', dest="use_setup_py",
            action='store_true', default=False,
            help="Process the setup.py of a project if present. " +
                 "(Experimental!")
        parser_apk.add_argument(
            '--ignore-setup-py', dest="ignore_setup_py",
            action='store_true', default=False,
            help="Don't run the setup.py of a project if present. " +
                 "This may be required if the setup.py is not " +
                 "designed to work inside p4a (e.g. by installing " +
                 "dependencies that won't work or aren't desired " +
                 "on Android")
        parser_apk.add_argument(
            '--keystore', dest='keystore', action='store', default=None,
            help=('Keystore for JAR signing key, will use jarsigner '
                  'default if not specified (release build only)'))
        parser_apk.add_argument(
            '--signkey', dest='signkey', action='store', default=None,
            help='Key alias to sign the APK with (release build only)')
        parser_apk.add_argument(
            '--keystorepw', dest='keystorepw', action='store', default=None,
            help='Password for keystore')
        parser_apk.add_argument(
            '--signkeypw', dest='signkeypw', action='store', default=None,
            help='Password for key alias')

        add_parser(
            subparsers,
            'create', help='Compile a set of requirements into a dist',
            parents=[generic_parser])
        add_parser(
            subparsers,
            'archs', help='List the available target architectures',
            parents=[generic_parser])
        add_parser(
            subparsers,
            'distributions', aliases=['dists'],
            help='List the currently available (compiled) dists',
            parents=[generic_parser])
        add_parser(
            subparsers,
            'delete_dist', aliases=['delete-dist'], help='Delete a compiled dist',
            parents=[generic_parser])

        parser_sdk_tools = add_parser(
            subparsers,
            'sdk_tools', aliases=['sdk-tools'],
            help='Run the given binary from the SDK tools dir',
            parents=[generic_parser])
        parser_sdk_tools.add_argument(
            'tool', help='The binary tool name to run')

        add_parser(
            subparsers,
            'adb', help='Run adb from the given SDK',
            parents=[generic_parser])
        add_parser(
            subparsers,
            'logcat', help='Run logcat from the given SDK',
            parents=[generic_parser])
        add_parser(
            subparsers,
            'build_status', aliases=['build-status'],
            help='Print some debug information about current built components',
            parents=[generic_parser])

        parser.add_argument('-v', '--version', action='version',
                            version=__version__)

        args, unknown = parser.parse_known_args(sys.argv[1:])
        args.unknown_args = unknown

        if hasattr(args, "private") and args.private is not None:
            # Pass this value on to the internal bootstrap build.py:
            args.unknown_args += ["--private", args.private]
        if hasattr(args, "ignore_setup_py") and args.ignore_setup_py:
            args.use_setup_py = False

        self.args = args

        if args.subparser_name is None:
            parser.print_help()
            exit(1)

        setup_color(args.color)

        if args.debug:
            logger.setLevel(logging.DEBUG)

        self.ctx = Context()
        self.ctx.use_setup_py = getattr(args, "use_setup_py", True)

        have_setup_py_or_similar = False
        if getattr(args, "private", None) is not None:
            project_dir = getattr(args, "private")
            if (os.path.exists(os.path.join(project_dir, "setup.py")) or
                    os.path.exists(os.path.join(project_dir,
                                                "pyproject.toml"))):
                have_setup_py_or_similar = True

        # Process requirements and put version in environ
        if hasattr(args, 'requirements'):
            requirements = []

            # Add dependencies from setup.py, but only if they are recipes
            # (because otherwise, setup.py itself will install them later)
            if (have_setup_py_or_similar and
                    getattr(args, "use_setup_py", False)):
                try:
                    info("Analyzing package dependencies. MAY TAKE A WHILE.")
                    # Get all the dependencies corresponding to a recipe:
                    dependencies = [
                        dep.lower() for dep in
                        get_dep_names_of_package(
                            args.private,
                            keep_version_pins=True,
                            recursive=True,
                            verbose=True,
                        )
                    ]
                    info("Dependencies obtained: " + str(dependencies))
                    all_recipes = [
                        recipe.lower() for recipe in
                        set(Recipe.list_recipes(self.ctx))
                    ]
                    dependencies = set(dependencies).intersection(
                        set(all_recipes)
                    )
                    # Add dependencies to argument list:
                    if len(dependencies) > 0:
                        if len(args.requirements) > 0:
                            args.requirements += u","
                        args.requirements += u",".join(dependencies)
                except ValueError:
                    # Not a python package, apparently.
                    warning(
                        "Processing failed, is this project a valid "
                        "package? Will continue WITHOUT setup.py deps."
                    )

            # Parse --requirements argument list:
            for requirement in split_argument_list(args.requirements):
                if "==" in requirement:
                    requirement, version = requirement.split(u"==", 1)
                    os.environ["VERSION_{}".format(requirement)] = version
                    info('Recipe {}: version "{}" requested'.format(
                        requirement, version))
                requirements.append(requirement)
            args.requirements = u",".join(requirements)

        self.warn_on_deprecated_args(args)

        self.storage_dir = args.storage_dir
        self.ctx.setup_dirs(self.storage_dir)
        self.sdk_dir = args.sdk_dir
        self.ndk_dir = args.ndk_dir
        self.android_api = args.android_api
        self.ndk_api = args.ndk_api
        self.ctx.symlink_java_src = args.symlink_java_src
        self.ctx.java_build_tool = args.java_build_tool

        self._archs = split_argument_list(args.arch)

        self.ctx.local_recipes = args.local_recipes
        self.ctx.copy_libs = args.copy_libs

        # Each subparser corresponds to a method
        getattr(self, args.subparser_name.replace('-', '_'))(args)
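The final line dispatches each subcommand to the method of the same name. A self-contained sketch of that dispatch pattern, using made-up command names rather than the real toolchain:

import argparse

class MiniCLI:
    def __init__(self):
        parser = argparse.ArgumentParser()
        subparsers = parser.add_subparsers(dest='subparser_name')
        subparsers.add_parser('clean_dists')
        args = parser.parse_args(['clean_dists'])
        # Same idea as above: map the command name onto a method and call it
        getattr(self, args.subparser_name.replace('-', '_'))(args)

    def clean_dists(self, args):
        print('would delete all dists here')

MiniCLI()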
Example No. 44
0
    def get_recipe_env(self, arch, with_flags_in_cc=True):
        """ Add libgeos headers to path """
        env = super(ShapelyRecipe, self).get_recipe_env(arch, with_flags_in_cc)
        libgeos_dir = Recipe.get_recipe('libgeos', self.ctx).get_build_dir(arch.arch)
        env['CFLAGS'] += " -I{}/dist/include".format(libgeos_dir)
        return env
Example No. 45
0
def get_recipe_order_and_bootstrap(ctx, names, bs=None):
    '''Takes a list of recipe names and (optionally) a bootstrap. Then
    works out the dependency graph (including bootstrap recipes if
    necessary). Finally, if no bootstrap was initially selected,
    chooses one that supports all the recipes.
    '''
    graph = Graph()
    recipes_to_load = set(names)
    if bs is not None and bs.recipe_depends:
        info_notify('Bootstrap requires recipes {}'.format(bs.recipe_depends))
        recipes_to_load = recipes_to_load.union(set(bs.recipe_depends))
    recipes_to_load = list(recipes_to_load)
    recipe_loaded = []
    python_modules = []
    while recipes_to_load:
        name = recipes_to_load.pop(0)
        if name in recipe_loaded or isinstance(name, (list, tuple)):
            continue
        try:
            recipe = Recipe.get_recipe(name, ctx)
        except IOError:
            info('No recipe named {}; will attempt to install with pip'.format(
                name))
            python_modules.append(name)
            continue
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            warning('Failed to import recipe named {}; the recipe exists '
                    'but appears broken.'.format(name))
            warning('Exception was:')
            raise
        graph.add(name, name)
        info('Loaded recipe {} (depends on {}{})'.format(
            name, recipe.depends, ', conflicts {}'.format(recipe.conflicts)
            if recipe.conflicts else ''))
        for depend in recipe.depends:
            graph.add(name, depend)
            recipes_to_load += recipe.depends
        for conflict in recipe.conflicts:
            if graph.conflicts(conflict):
                warning(('{} conflicts with {}, but both have been '
                         'included or pulled into the requirements.'.format(
                             recipe.name, conflict)))
                warning(
                    'Due to this conflict the build cannot continue, exiting.')
                exit(1)
        python_modules += recipe.python_depends
        recipe_loaded.append(name)
    graph.remove_remaining_conflicts(ctx)
    if len(graph.graphs) > 1:
        info('Found multiple valid recipe sets:')
        for g in graph.graphs:
            info('    {}'.format(g.keys()))
        info_notify('Using the first of these: {}'.format(
            graph.graphs[0].keys()))
    elif len(graph.graphs) == 0:
        warning('Didn\'t find any valid dependency graphs, exiting.')
        exit(1)
    else:
        info('Found a single valid recipe set (this is good)')

    build_order = list(graph.find_order(0))
    # It would be better to check against possible orders other than the
    # first one, but in practice there will rarely be clashes, and the user
    # can specify more parameters if necessary to resolve them.
    if bs is None:
        bs = Bootstrap.get_bootstrap_from_recipes(build_order, ctx)
        if bs is None:
            info('Could not find a bootstrap compatible with the '
                 'required recipes.')
            info('If you think such a combination should exist, try '
                 'specifying the bootstrap manually with --bootstrap.')
            exit(1)
        info('{} bootstrap appears compatible with the required recipes.'.
             format(bs.name))
        info('Checking this...')
        recipes_to_load = bs.recipe_depends
        # This code repeats the code from earlier! Should move to a function:
        while recipes_to_load:
            name = recipes_to_load.pop(0)
            if name in recipe_loaded or isinstance(name, (list, tuple)):
                continue
            try:
                recipe = Recipe.get_recipe(name, ctx)
            except ImportError:
                info('No recipe named {}; will attempt to install with pip'.
                     format(name))
                python_modules.append(name)
                continue
            graph.add(name, name)
            info('Loaded recipe {} (depends on {}{})'.format(
                name, recipe.depends, ', conflicts {}'.format(recipe.conflicts)
                if recipe.conflicts else ''))
            for depend in recipe.depends:
                graph.add(name, depend)
                recipes_to_load += recipe.depends
            for conflict in recipe.conflicts:
                if graph.conflicts(conflict):
                    warning(
                        ('{} conflicts with {}, but both have been '
                         'included or pulled into the requirements.'.format(
                             recipe.name, conflict)))
                    warning('Due to this conflict the build cannot continue, '
                            'exiting.')
                    exit(1)
            recipe_loaded.append(name)
        graph.remove_remaining_conflicts(ctx)
        build_order = list(graph.find_order(0))
    return build_order, python_modules, bs
Example No. 46
0
    def test_postarch_build(self, mock_install_stl_lib):
        arch = ArchAarch_64(self.ctx)
        recipe = Recipe.get_recipe('icu', self.ctx)
        assert recipe.need_stl_shared
        recipe.postbuild_arch(arch)
        mock_install_stl_lib.assert_called_once_with(arch)
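The mock_install_stl_lib argument above is supplied by a mock.patch decorator that is not shown in this excerpt. A self-contained sketch of the same patch-and-assert pattern, using a stand-in class instead of the real recipe:

from unittest import mock

class FakeRecipe:
    def install_stl_lib(self, arch):
        raise RuntimeError('should be replaced by the mock in tests')

with mock.patch.object(FakeRecipe, 'install_stl_lib') as mock_install_stl_lib:
    FakeRecipe().install_stl_lib('arm64-v8a')
    mock_install_stl_lib.assert_called_once_with('arm64-v8a')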
Example No. 47
0
    def do_python_build(self, arch):
        if 'sqlite' in self.ctx.recipe_build_order:
            print('sqlite support not yet enabled in python recipe')
            exit(1)

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
        shprint(sh.cp, self.ctx.hostpython, self.get_build_dir(arch.arch))
        shprint(sh.cp, self.ctx.hostpgen, self.get_build_dir(arch.arch))
        hostpython = join(self.get_build_dir(arch.arch), 'hostpython')
        hostpgen = join(self.get_build_dir(arch.arch), 'hostpython')

        with current_directory(self.get_build_dir(arch.arch)):


            hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
            shprint(sh.cp, join(hostpython_recipe.get_recipe_dir(), 'Setup'), 'Modules')

            env = arch.get_env()

            # AND: Should probably move these to get_recipe_env for
            # neatness, but the whole recipe needs tidying along these
            # lines
            env['HOSTARCH'] = 'arm-eabi'
            env['BUILDARCH'] = shprint(sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
            env['CFLAGS'] = ' '.join([env['CFLAGS'], '-DNO_MALLINFO'])

            # TODO need to add a should_build that checks if optional
            # dependencies have changed (possibly in a generic way)
            if 'openssl' in self.ctx.recipe_build_order:
                openssl_build_dir = Recipe.get_recipe('openssl', self.ctx).get_build_dir(arch.arch)
                setuplocal = join('Modules', 'Setup.local')
                shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'), setuplocal)
                shprint(sh.sed, '-i', 's#^SSL=.*#SSL={}#'.format(openssl_build_dir), setuplocal)

            configure = sh.Command('./configure')
            # AND: OFLAG isn't actually set, should it be?
            shprint(configure,
                    '--host={}'.format(env['HOSTARCH']),
                    '--build={}'.format(env['BUILDARCH']),
                    # 'OPT={}'.format(env['OFLAG']),
                    '--prefix={}'.format(realpath('./python-install')),
                    '--enable-shared',
                    '--disable-toolbox-glue',
                    '--disable-framework',
                    _env=env)

            # AND: tito left this comment in the original source. It's still true!
            # FIXME, the first time, we got a error at:
            # python$EXE ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
            # /home/tito/code/python-for-android/build/python/Python-2.7.2/python: 1: Syntax error: word unexpected (expecting ")")
            # because at this time, python is arm, not x86. even that, why /usr/include/netinet/in.h is used ?
            # check if we can avoid this part.

            make = sh.Command(env['MAKE'].split(' ')[0])
            print('First install (expected to fail)...')
            try:
                shprint(make, '-j5', 'install', 'HOSTPYTHON={}'.format(hostpython),
                        'HOSTPGEN={}'.format(hostpgen),
                        'CROSS_COMPILE_TARGET=yes',
                        'INSTSONAME=libpython2.7.so',
                        _env=env)
            except sh.ErrorReturnCode_2:
                print('First python2 make failed. This is expected, trying again.')


            print('Second install (expected to work)')
            shprint(sh.touch, 'python.exe', 'python')
            shprint(make, '-j5', 'install', 'HOSTPYTHON={}'.format(hostpython),
                    'HOSTPGEN={}'.format(hostpgen),
                    'CROSS_COMPILE_TARGET=yes',
                    'INSTSONAME=libpython2.7.so',
                    _env=env)

            if is_darwin():
                shprint(sh.cp, join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'Lib'))
                shprint(sh.cp, join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'lib', 'python2.7'))

            # reduce python
            for dir_name in ('test', join('json', 'tests'), 'lib-tk',
                             join('sqlite3', 'test'), join('unittest', 'test'),
                             join('lib2to3', 'tests'), join('bsddb', 'tests'),
                             join('distutils', 'tests'), join('email', 'test'),
                             'curses'):
                shprint(sh.rm, '-rf', join('python-install',
                                           'lib', 'python2.7', dir_name))
Example No. 48
0
    def set_libs_flags(self, env, arch):
        '''Takes care to properly link libraries with python depending on our
        requirements and the attribute :attr:`opt_depends`.
        '''
        def add_flags(include_flags, link_dirs, link_libs):
            env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include_flags
            env['LDFLAGS'] = env.get('LDFLAGS', '') + link_dirs
            env['LIBS'] = env.get('LIBS', '') + link_libs

        if 'sqlite3' in self.ctx.recipe_build_order:
            info('Activating flags for sqlite3')
            recipe = Recipe.get_recipe('sqlite3', self.ctx)
            add_flags(' -I' + recipe.get_build_dir(arch.arch),
                      ' -L' + recipe.get_lib_dir(arch), ' -lsqlite3')

        if 'libffi' in self.ctx.recipe_build_order:
            info('Activating flags for libffi')
            recipe = Recipe.get_recipe('libffi', self.ctx)
            # In order to force the correct linkage for our libffi library, we
            # set the following variable to point to where our libffi.pc file is,
            # because the python build system uses pkg-config to configure it.
            env['PKG_CONFIG_PATH'] = recipe.get_build_dir(arch.arch)
            add_flags(' -I' + ' -I'.join(recipe.get_include_dirs(arch)),
                      ' -L' + join(recipe.get_build_dir(arch.arch), '.libs'),
                      ' -lffi')

        if 'openssl' in self.ctx.recipe_build_order:
            info('Activating flags for openssl')
            recipe = Recipe.get_recipe('openssl', self.ctx)
            add_flags(recipe.include_flags(arch), recipe.link_dirs_flags(arch),
                      recipe.link_libs_flags())

        for library_name in {'libbz2', 'liblzma'}:
            if library_name in self.ctx.recipe_build_order:
                info(f'Activating flags for {library_name}')
                recipe = Recipe.get_recipe(library_name, self.ctx)
                add_flags(recipe.get_library_includes(arch),
                          recipe.get_library_ldflags(arch),
                          recipe.get_library_libs_flag())

        # The python build system contains a hardcoded zlib version, which
        # prevents the zlib module from building. Here we look up the zlib
        # version shipped with the Android NDK and set the right flags, so
        # python can be built against Android's zlib.
        info("Activating flags for android's zlib")
        zlib_lib_path = join(self.ctx.ndk_platform, 'usr', 'lib')
        zlib_includes = join(self.ctx.ndk_dir, 'sysroot', 'usr', 'include')
        zlib_h = join(zlib_includes, 'zlib.h')
        try:
            with open(zlib_h) as fileh:
                zlib_data = fileh.read()
        except IOError:
            raise BuildInterruptingException(
                "Could not determine android's zlib version, no zlib.h ({}) in"
                " the NDK dir includes".format(zlib_h))
        for line in zlib_data.split('\n'):
            if line.startswith('#define ZLIB_VERSION '):
                break
        else:
            raise BuildInterruptingException(
                'Could not parse zlib.h, so we cannot find the zlib version '
                'required by the python build.')
        env['ZLIB_VERSION'] = line.replace('#define ZLIB_VERSION ', '')
        add_flags(' -I' + zlib_includes, ' -L' + zlib_lib_path, ' -lz')

        return env
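The zlib handling above boils down to scanning zlib.h for its ZLIB_VERSION define. A minimal standalone sketch of that detection (the helper name is hypothetical; the returned value keeps the surrounding quotes, as in the recipe):

def read_zlib_version(zlib_h_path):
    with open(zlib_h_path) as fileh:
        for line in fileh:
            if line.startswith('#define ZLIB_VERSION '):
                return line.replace('#define ZLIB_VERSION ', '').strip()
    return None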
Example No. 49
0
    def get_env(self, with_flags_in_cc=True, clang=False):
        env = {}

        cflags = [
            '-DANDROID', '-fomit-frame-pointer',
            '-D__ANDROID_API__={}'.format(self.ctx.ndk_api)
        ]
        if not clang:
            cflags.append('-mandroid')
        else:
            cflags.append('-target ' + self.target)
            toolchain = '{android_host}-{toolchain_version}'.format(
                android_host=self.ctx.toolchain_prefix,
                toolchain_version=self.ctx.toolchain_version)
            toolchain = join(self.ctx.ndk_dir, 'toolchains', toolchain,
                             'prebuilt', 'linux-x86_64')
            cflags.append('-gcc-toolchain {}'.format(toolchain))

        env['CFLAGS'] = ' '.join(cflags)
        env['LDFLAGS'] = ' '

        sysroot = join(self.ctx._ndk_dir, 'sysroot')
        if exists(sysroot):
            # post-15 NDK per
            # https://android.googlesource.com/platform/ndk/+/ndk-r15-release/docs/UnifiedHeaders.md
            env['CFLAGS'] += ' -isystem {}/sysroot/usr/include/{}'.format(
                self.ctx.ndk_dir, self.ctx.toolchain_prefix)
            env['CFLAGS'] += ' -I{}/sysroot/usr/include/{}'.format(
                self.ctx.ndk_dir, self.command_prefix)
        else:
            sysroot = self.ctx.ndk_platform
            env['CFLAGS'] += ' -I{}'.format(self.ctx.ndk_platform)
        env['CFLAGS'] += ' -isysroot {} '.format(sysroot)
        env['CFLAGS'] += '-I' + join(
            self.ctx.get_python_install_dir(), 'include/python{}'.format(
                self.ctx.python_recipe.version[0:3]))

        env['LDFLAGS'] += '--sysroot {} '.format(self.ctx.ndk_platform)

        env["CXXFLAGS"] = env["CFLAGS"]

        env["LDFLAGS"] += " ".join(
            ['-lm', '-L' + self.ctx.get_libs_dir(self.arch)])

        if self.ctx.ndk == 'crystax':
            env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(
                self.ctx.ndk_dir, self.arch)

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_prefix = self.ctx.toolchain_prefix
        toolchain_version = self.ctx.toolchain_version
        command_prefix = self.command_prefix

        env['TOOLCHAIN_PREFIX'] = toolchain_prefix
        env['TOOLCHAIN_VERSION'] = toolchain_version

        ccache = ''
        if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))):
            # print('ccache found, will optimize builds')
            ccache = self.ctx.ccache + ' '
            env['USE_CCACHE'] = '1'
            env['NDK_CCACHE'] = self.ctx.ccache
            env.update(
                {k: v
                 for k, v in environ.items() if k.startswith('CCACHE_')})

        if clang:
            llvm_dirname = split(
                glob(join(self.ctx.ndk_dir, 'toolchains', 'llvm*'))[-1])[-1]
            clang_path = join(self.ctx.ndk_dir, 'toolchains', llvm_dirname,
                              'prebuilt', 'linux-x86_64', 'bin')
            environ['PATH'] = '{clang_path}:{path}'.format(
                clang_path=clang_path, path=environ['PATH'])
            exe = join(clang_path, 'clang')
            execxx = join(clang_path, 'clang++')
        else:
            exe = '{command_prefix}-gcc'.format(command_prefix=command_prefix)
            execxx = '{command_prefix}-g++'.format(
                command_prefix=command_prefix)

        cc = find_executable(exe, path=environ['PATH'])
        if cc is None:
            print('Search path is: {!r}'.format(environ['PATH']))
            raise BuildInterruptingException(
                'Couldn\'t find executable for CC. This indicates a '
                'problem locating the {} executable in the Android '
                'NDK, not that you don\'t have a normal compiler '
                'installed. Exiting.'.format(exe))

        if with_flags_in_cc:
            env['CC'] = '{ccache}{exe} {cflags}'.format(exe=exe,
                                                        ccache=ccache,
                                                        cflags=env['CFLAGS'])
            env['CXX'] = '{ccache}{execxx} {cxxflags}'.format(
                execxx=execxx, ccache=ccache, cxxflags=env['CXXFLAGS'])
        else:
            env['CC'] = '{ccache}{exe}'.format(exe=exe, ccache=ccache)
            env['CXX'] = '{ccache}{execxx}'.format(execxx=execxx,
                                                   ccache=ccache)

        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)
        env['LDSHARED'] = env["CC"] + " -pthread -shared " +\
            "-Wl,-O1 -Wl,-Bsymbolic-functions "
        if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
            # For crystax python, we can't use the host python headers:
            env["CFLAGS"] += ' -I{}/sources/python/{}/include/python/'.\
                format(self.ctx.ndk_dir, self.ctx.python_recipe.version[0:3])
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j5'
        env['READELF'] = '{}-readelf'.format(command_prefix)
        env['NM'] = '{}-nm'.format(command_prefix)

        hostpython_recipe = Recipe.get_recipe(
            'host' + self.ctx.python_recipe.name, self.ctx)
        env['BUILDLIB_PATH'] = join(
            hostpython_recipe.get_build_dir(self.arch), 'build',
            'lib.linux-{}-{}'.format(
                uname()[-1],
                self.ctx.python_recipe.major_minor_version_string))

        env['PATH'] = environ['PATH']

        env['ARCH'] = self.arch
        env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api))

        if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
            env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version

        return env
Example No. 50
0
    def get_env(self, with_flags_in_cc=True):
        env = {}

        # CFLAGS/CXXFLAGS: the processor flags
        env['CFLAGS'] = ' '.join(self.common_cflags).format(target=self.target)
        if self.arch_cflags:
            # each architecture may have its own CFLAGS
            env['CFLAGS'] += ' ' + ' '.join(self.arch_cflags)
        env['CXXFLAGS'] = env['CFLAGS']

        # CPPFLAGS (for macros and includes)
        env['CPPFLAGS'] = ' '.join(self.common_cppflags).format(
            ctx=self.ctx,
            command_prefix=self.command_prefix,
            python_includes=join(
                self.ctx.get_python_install_dir(),
                'include/python{}'.format(self.ctx.python_recipe.version[0:3]),
            ),
        )

        # LDFLAGS: Link the extra global link paths first before anything else
        # (such that overriding system libraries with them is possible)
        env['LDFLAGS'] = (
            ' ' + " ".join([
                "-L'" + link_path.replace("'", "'\"'\"'") +
                "'"  # no shlex.quote in py2
                for link_path in self.extra_global_link_paths
            ]) + ' ' + ' '.join(self.common_ldflags).format(
                ctx_libs_dir=self.ctx.get_libs_dir(self.arch)))

        # LDLIBS: Library flags or names given to compilers when they are
        # supposed to invoke the linker.
        env['LDLIBS'] = ' '.join(self.common_ldlibs)

        # CCACHE
        ccache = ''
        if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))):
            # print('ccache found, will optimize builds')
            ccache = self.ctx.ccache + ' '
            env['USE_CCACHE'] = '1'
            env['NDK_CCACHE'] = self.ctx.ccache
            env.update(
                {k: v
                 for k, v in environ.items() if k.startswith('CCACHE_')})

        # Compiler: `CC` and `CXX` (and make sure that the compiler exists)
        environ['PATH'] = '{clang_path}:{path}'.format(
            clang_path=self.clang_path, path=environ['PATH'])
        cc = find_executable(self.clang_exe, path=environ['PATH'])
        if cc is None:
            print('Search path is: {!r}'.format(environ['PATH']))
            raise BuildInterruptingException(
                'Couldn\'t find executable for CC. This indicates a '
                'problem locating the {} executable in the Android '
                'NDK, not that you don\'t have a normal compiler '
                'installed. Exiting.'.format(self.clang_exe))

        if with_flags_in_cc:
            env['CC'] = '{ccache}{exe} {cflags}'.format(exe=self.clang_exe,
                                                        ccache=ccache,
                                                        cflags=env['CFLAGS'])
            env['CXX'] = '{ccache}{execxx} {cxxflags}'.format(
                execxx=self.clang_exe_cxx,
                ccache=ccache,
                cxxflags=env['CXXFLAGS'])
        else:
            env['CC'] = '{ccache}{exe}'.format(exe=self.clang_exe,
                                               ccache=ccache)
            env['CXX'] = '{ccache}{execxx}'.format(execxx=self.clang_exe_cxx,
                                                   ccache=ccache)

        # Android's binaries
        command_prefix = self.command_prefix
        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j{}'.format(str(cpu_count()))
        env['READELF'] = '{}-readelf'.format(command_prefix)
        env['NM'] = '{}-nm'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)

        # Android's arch/toolchain
        env['ARCH'] = self.arch
        env['NDK_API'] = 'android-{}'.format(str(self.ctx.ndk_api))
        env['TOOLCHAIN_PREFIX'] = self.ctx.toolchain_prefix
        env['TOOLCHAIN_VERSION'] = self.ctx.toolchain_version

        # Custom linker options
        env['LDSHARED'] = env['CC'] + ' ' + ' '.join(self.common_ldshared)

        # Host python (used by some recipes)
        hostpython_recipe = Recipe.get_recipe(
            'host' + self.ctx.python_recipe.name, self.ctx)
        env['BUILDLIB_PATH'] = join(
            hostpython_recipe.get_build_dir(self.arch),
            'native-build',
            'build',
            'lib.{}-{}'.format(
                build_platform,
                self.ctx.python_recipe.major_minor_version_string,
            ),
        )

        env['PATH'] = environ['PATH']

        # for reproducible builds
        if 'SOURCE_DATE_EPOCH' in environ:
            for k in ('LC_ALL TZ SOURCE_DATE_EPOCH PYTHONHASHSEED '
                      'BUILD_DATE BUILD_TIME').split():
                if k in environ:
                    env[k] = environ[k]

        return env
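For context, a recipe's build step typically consumes this environment by passing it to every build command. A rough sketch reusing the helpers seen throughout these examples (shprint, sh, current_directory, realpath); the configure flags and install prefix are placeholders:

def build_arch(self, arch):
    env = arch.get_env()
    with current_directory(self.get_build_dir(arch.arch)):
        configure = sh.Command('./configure')
        shprint(configure, '--prefix={}'.format(realpath('./install')), _env=env)
        shprint(sh.make, '-j4', _env=env)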
Example No. 51
0
def build_recipes(build_order,
                  python_modules,
                  ctx,
                  project_dir,
                  ignore_project_setup_py=False):
    # Put recipes in correct build order
    info_notify("Recipe build order is {}".format(build_order))
    if python_modules:
        python_modules = sorted(set(python_modules))
        info_notify(
            ("The requirements ({}) were not found as recipes, they will be "
             "installed with pip.").format(", ".join(python_modules)))

    recipes = [Recipe.get_recipe(name, ctx) for name in build_order]

    # download is arch independent
    info_main("# Downloading recipes ")
    for recipe in recipes:
        recipe.download_if_necessary()

    for arch in ctx.archs:
        info_main("# Building all recipes for arch {}".format(arch.arch))

        info_main("# Unpacking recipes")
        for recipe in recipes:
            ensure_dir(recipe.get_build_container_dir(arch.arch))
            recipe.prepare_build_dir(arch.arch)

        info_main("# Prebuilding recipes")
        # 2) prebuild packages
        for recipe in recipes:
            info_main("Prebuilding {} for {}".format(recipe.name, arch.arch))
            recipe.prebuild_arch(arch)
            recipe.apply_patches(arch)

        # 3) build packages
        info_main("# Building recipes")
        for recipe in recipes:
            info_main("Building {} for {}".format(recipe.name, arch.arch))
            if recipe.should_build(arch):
                recipe.build_arch(arch)
            else:
                info("{} said it is already built, skipping".format(
                    recipe.name))
            recipe.install_libraries(arch)

        # 4) biglink everything
        info_main("# Biglinking object files")
        if not ctx.python_recipe:
            biglink(ctx, arch)
        else:
            warning("Context's python recipe found, "
                    "skipping biglink (will this work?)")

        # 5) postbuild packages
        info_main("# Postbuilding recipes")
        for recipe in recipes:
            info_main("Postbuilding {} for {}".format(recipe.name, arch.arch))
            recipe.postbuild_arch(arch)

    info_main("# Installing pure Python modules")
    run_pymodules_install(ctx,
                          python_modules,
                          project_dir,
                          ignore_setup_py=ignore_project_setup_py)
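Putting the two pieces together, a driver might first resolve the order and then build it. An illustrative sketch only; the requirement list and project path are placeholders:

build_order, python_modules, bs = get_recipe_order_and_bootstrap(ctx, ['kivy'])
build_recipes(build_order, python_modules, ctx,
              project_dir='/path/to/myapp',   # placeholder
              ignore_project_setup_py=True)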
Example No. 52
0
    def do_python_build(self, arch):

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
        shprint(sh.cp, self.ctx.hostpython, self.get_build_dir(arch.arch))
        shprint(sh.cp, self.ctx.hostpgen, self.get_build_dir(arch.arch))
        hostpython = join(self.get_build_dir(arch.arch), 'hostpython')
        hostpgen = join(self.get_build_dir(arch.arch), 'hostpython')

        with current_directory(self.get_build_dir(arch.arch)):

            hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)
            shprint(sh.cp, join(hostpython_recipe.get_recipe_dir(), 'Setup'),
                    'Modules')

            env = arch.get_env()

            env['HOSTARCH'] = 'arm-eabi'
            env['BUILDARCH'] = shprint(
                sh.gcc, '-dumpmachine').stdout.decode('utf-8').split('\n')[0]
            env['CFLAGS'] = ' '.join([env['CFLAGS'], '-DNO_MALLINFO'])

            # TODO need to add a should_build that checks if optional
            # dependencies have changed (possibly in a generic way)
            if 'openssl' in self.ctx.recipe_build_order:
                recipe = Recipe.get_recipe('openssl', self.ctx)
                openssl_build_dir = recipe.get_build_dir(arch.arch)
                setuplocal = join('Modules', 'Setup.local')
                shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'),
                        setuplocal)
                shprint(sh.sed, '-i.backup',
                        's#^SSL=.*#SSL={}#'.format(openssl_build_dir),
                        setuplocal)
                env['OPENSSL_VERSION'] = recipe.version

            if 'sqlite3' in self.ctx.recipe_build_order:
                # Include sqlite3 in python2 build
                recipe = Recipe.get_recipe('sqlite3', self.ctx)
                include = ' -I' + recipe.get_build_dir(arch.arch)
                lib = ' -L' + recipe.get_lib_dir(arch) + ' -lsqlite3'
                # Insert or append to env
                flag = 'CPPFLAGS'
                env[flag] = env[flag] + include if flag in env else include
                flag = 'LDFLAGS'
                env[flag] = env[flag] + lib if flag in env else lib

            # NDK has langinfo.h but doesn't define nl_langinfo()
            env['ac_cv_header_langinfo_h'] = 'no'
            configure = sh.Command('./configure')
            shprint(
                configure,
                '--host={}'.format(env['HOSTARCH']),
                '--build={}'.format(env['BUILDARCH']),
                # 'OPT={}'.format(env['OFLAG']),
                '--prefix={}'.format(realpath('./python-install')),
                '--enable-shared',
                '--disable-toolbox-glue',
                '--disable-framework',
                _env=env)

            # tito left this comment in the original source. It's still true!
            # FIXME, the first time, we got a error at:
            # python$EXE ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
            # /home/tito/code/python-for-android/build/python/Python-2.7.2/python: 1: Syntax error: word unexpected (expecting ")")
            # because at this time, python is arm, not x86. even that, why /usr/include/netinet/in.h is used ?
            # check if we can avoid this part.

            make = sh.Command(env['MAKE'].split(' ')[0])
            print('First install (expected to fail)...')
            try:
                shprint(make,
                        '-j5',
                        'install',
                        'HOSTPYTHON={}'.format(hostpython),
                        'HOSTPGEN={}'.format(hostpgen),
                        'CROSS_COMPILE_TARGET=yes',
                        'INSTSONAME=libpython2.7.so',
                        _env=env)
            except sh.ErrorReturnCode_2:
                print(
                    'First python2 make failed. This is expected, trying again.'
                )

            print('Second install (expected to work)')
            shprint(sh.touch, 'python.exe', 'python')
            shprint(make,
                    '-j5',
                    'install',
                    'HOSTPYTHON={}'.format(hostpython),
                    'HOSTPGEN={}'.format(hostpgen),
                    'CROSS_COMPILE_TARGET=yes',
                    'INSTSONAME=libpython2.7.so',
                    _env=env)

            if is_darwin():
                shprint(sh.cp,
                        join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'Lib'))
                shprint(sh.cp,
                        join(self.get_recipe_dir(), 'patches', '_scproxy.py'),
                        join('python-install', 'lib', 'python2.7'))

            # reduce python
            for dir_name in ('test', join('json', 'tests'), 'lib-tk',
                             join('sqlite3', 'test'), join('unittest', 'test'),
                             join('lib2to3', 'tests'), join('bsddb', 'tests'),
                             join('distutils', 'tests'), join('email', 'test'),
                             'curses'):
                shprint(sh.rm, '-rf',
                        join('python-install', 'lib', 'python2.7', dir_name))
Example No. 53
0
    def get_env(self, with_flags_in_cc=True):
        env = {}

        env['CFLAGS'] = ' '.join([
            '-DANDROID', '-mandroid', '-fomit-frame-pointer',
            '-D__ANDROID_API__={}'.format(self.ctx._android_api),
        ])
        env['LDFLAGS'] = ' '

        sysroot = join(self.ctx._ndk_dir, 'sysroot')
        if exists(sysroot):
            # post-15 NDK per
            # https://android.googlesource.com/platform/ndk/+/ndk-r15-release/docs/UnifiedHeaders.md
            env['CFLAGS'] += ' -isystem {}/sysroot/usr/include/{}'.format(
                self.ctx.ndk_dir, self.ctx.toolchain_prefix)
        else:
            sysroot = self.ctx.ndk_platform
            env['CFLAGS'] += ' -I{}'.format(self.ctx.ndk_platform)
        env['CFLAGS'] += ' -isysroot {} '.format(sysroot)
        env['CFLAGS'] += '-I' + join(self.ctx.get_python_install_dir(),
                                     'include/python{}'.format(
                                         self.ctx.python_recipe.version[0:3])
                                    )

        env['LDFLAGS'] += '--sysroot {} '.format(self.ctx.ndk_platform)

        env["CXXFLAGS"] = env["CFLAGS"]

        env["LDFLAGS"] += " ".join(['-lm', '-L' + self.ctx.get_libs_dir(self.arch)])

        if self.ctx.ndk == 'crystax':
            env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(self.ctx.ndk_dir, self.arch)

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_prefix = self.ctx.toolchain_prefix
        toolchain_version = self.ctx.toolchain_version
        command_prefix = self.command_prefix

        env['TOOLCHAIN_PREFIX'] = toolchain_prefix
        env['TOOLCHAIN_VERSION'] = toolchain_version

        ccache = ''
        if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))):
            # print('ccache found, will optimize builds')
            ccache = self.ctx.ccache + ' '
            env['USE_CCACHE'] = '1'
            env['NDK_CCACHE'] = self.ctx.ccache
            env.update({k: v for k, v in environ.items() if k.startswith('CCACHE_')})

        cc = find_executable('{command_prefix}-gcc'.format(
            command_prefix=command_prefix), path=environ['PATH'])
        if cc is None:
            print('Search path is: {!r}'.format(environ['PATH']))
            warning('Couldn\'t find executable for CC. This indicates a '
                    'problem locating the CC executable in the Android '
                    'NDK, not that you don\'t have a normal compiler '
                    'installed. Exiting.')
            exit(1)

        if with_flags_in_cc:
            env['CC'] = '{ccache}{command_prefix}-gcc {cflags}'.format(
                command_prefix=command_prefix,
                ccache=ccache,
                cflags=env['CFLAGS'])
            env['CXX'] = '{ccache}{command_prefix}-g++ {cxxflags}'.format(
                command_prefix=command_prefix,
                ccache=ccache,
                cxxflags=env['CXXFLAGS'])
        else:
            env['CC'] = '{ccache}{command_prefix}-gcc'.format(
                command_prefix=command_prefix,
                ccache=ccache)
            env['CXX'] = '{ccache}{command_prefix}-g++'.format(
                command_prefix=command_prefix,
                ccache=ccache)

        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)
        # env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
        # env['LDSHARED'] = env['LD']
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j5'
        env['READELF'] = '{}-readelf'.format(command_prefix)
        env['NM'] = '{}-nm'.format(command_prefix)

        hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)

        # AND: This hardcodes python version 2.7, needs fixing
        env['BUILDLIB_PATH'] = join(
            hostpython_recipe.get_build_dir(self.arch),
            'build', 'lib.linux-{}-2.7'.format(uname()[-1]))

        env['PATH'] = environ['PATH']

        env['ARCH'] = self.arch

        if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
            env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version

        return env
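
The dictionary returned by `get_env` is normally handed to every build command as its process environment. A rough usage sketch, assuming `arch` is an instance of the class this method belongs to and that the `sh`/`shprint` helpers seen elsewhere in this document are available; the configure flag shown is illustrative only:

env = arch.get_env(with_flags_in_cc=True)
# each toolchain command then runs with the cross-compiler and flags from `env`
shprint(sh.Command('./configure'),
        '--host={}'.format(env['TOOLCHAIN_PREFIX']), _env=env)
shprint(sh.Command('make'), '-j5', _env=env)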
Ejemplo n.º 54
0
def build_recipes(build_order,
                  python_modules,
                  ctx,
                  project_dir,
                  ignore_project_setup_py=False):
    # Put recipes in correct build order
    info_notify("Recipe build order is {}".format(build_order))
    if python_modules:
        python_modules = sorted(set(python_modules))
        info_notify(
            ('The requirements ({}) were not found as recipes; they will be '
             'installed with pip.').format(', '.join(python_modules)))

    recipes = [Recipe.get_recipe(name, ctx) for name in build_order]

    # download is arch independent
    info_main('# Downloading recipes ')
    for recipe in recipes:
        recipe.download_if_necessary()

    for arch in ctx.archs:
        info_main('# Building all recipes for arch {}'.format(arch.arch))

        info_main('# Unpacking recipes')
        for recipe in recipes:
            ensure_dir(recipe.get_build_container_dir(arch.arch))
            recipe.prepare_build_dir(arch.arch)

        info_main('# Prebuilding recipes')
        # 2) prebuild packages
        for recipe in recipes:
            info_main('Prebuilding {} for {}'.format(recipe.name, arch.arch))
            recipe.prebuild_arch(arch)
            recipe.apply_patches(arch)

        # 3) build packages
        info_main('# Building recipes')
        for recipe in recipes:
            info_main('Building {} for {}'.format(recipe.name, arch.arch))
            if recipe.should_build(arch):
                recipe.build_arch(arch)
            else:
                info('{} said it is already built, skipping'.format(
                    recipe.name))

        # 4) biglink everything
        info_main('# Biglinking object files')
        if not ctx.python_recipe or not ctx.python_recipe.from_crystax:
            biglink(ctx, arch)
        else:
            info('NDK is crystax, skipping biglink (will this work?)')

        # 5) postbuild packages
        info_main('# Postbuilding recipes')
        for recipe in recipes:
            info_main('Postbuilding {} for {}'.format(recipe.name, arch.arch))
            recipe.postbuild_arch(arch)

    info_main('# Installing pure Python modules')
    run_pymodules_install(ctx,
                          python_modules,
                          project_dir,
                          ignore_setup_py=ignore_project_setup_py)

    return
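
A rough usage sketch for `build_recipes`, assuming `ctx` is a fully prepared build context (archs, SDK/NDK paths and bootstrap already set up) and that `build_order` is already in dependency order; the recipe names, module list and project path below are purely illustrative:

build_order = ['hostpython2', 'libffi', 'openssl', 'sqlite3', 'python2']
python_modules = ['requests', 'six']  # pure-Python requirements, installed via pip
build_recipes(build_order, python_modules, ctx,
              project_dir='/path/to/app',        # illustrative path
              ignore_project_setup_py=False)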
Ejemplo n.º 55
0
    def get_env(self, with_flags_in_cc=True):
        env = {}

        env["CFLAGS"] = " ".join([
            "-DANDROID", "-mandroid", "-fomit-frame-pointer", "--sysroot",
            self.ctx.ndk_platform
        ])

        env["CXXFLAGS"] = env["CFLAGS"]

        env["LDFLAGS"] = " ".join(
            ['-lm', '-L' + self.ctx.get_libs_dir(self.arch)])

        if self.ctx.ndk == 'crystax':
            env['LDFLAGS'] += ' -L{}/sources/crystax/libs/{} -lcrystax'.format(
                self.ctx.ndk_dir, self.arch)

        py_platform = sys.platform
        if py_platform in ['linux2', 'linux3']:
            py_platform = 'linux'

        toolchain_prefix = self.ctx.toolchain_prefix
        toolchain_version = self.ctx.toolchain_version
        command_prefix = self.command_prefix

        env['TOOLCHAIN_PREFIX'] = toolchain_prefix
        env['TOOLCHAIN_VERSION'] = toolchain_version

        ccache = ''
        if self.ctx.ccache and bool(int(environ.get('USE_CCACHE', '1'))):
            # print('ccache found, will optimize builds')
            ccache = self.ctx.ccache + ' '
            env['USE_CCACHE'] = '1'
            env['NDK_CCACHE'] = self.ctx.ccache
            env.update(
                {k: v
                 for k, v in environ.items() if k.startswith('CCACHE_')})

        cc = find_executable(
            '{command_prefix}-gcc'.format(command_prefix=command_prefix),
            path=environ['PATH'])
        if cc is None:
            print('Search path is: {!r}'.format(environ['PATH']))
            warning('Couldn\'t find executable for CC. This indicates a '
                    'problem locating the CC executable in the Android '
                    'NDK, not that you don\'t have a normal compiler '
                    'installed. Exiting.')
            exit(1)

        if with_flags_in_cc:
            env['CC'] = '{ccache}{command_prefix}-gcc {cflags}'.format(
                command_prefix=command_prefix,
                ccache=ccache,
                cflags=env['CFLAGS'])
            env['CXX'] = '{ccache}{command_prefix}-g++ {cxxflags}'.format(
                command_prefix=command_prefix,
                ccache=ccache,
                cxxflags=env['CXXFLAGS'])
        else:
            env['CC'] = '{ccache}{command_prefix}-gcc'.format(
                command_prefix=command_prefix, ccache=ccache)
            env['CXX'] = '{ccache}{command_prefix}-g++'.format(
                command_prefix=command_prefix, ccache=ccache)

        env['AR'] = '{}-ar'.format(command_prefix)
        env['RANLIB'] = '{}-ranlib'.format(command_prefix)
        env['LD'] = '{}-ld'.format(command_prefix)
        # env['LDSHARED'] = join(self.ctx.root_dir, 'tools', 'liblink')
        # env['LDSHARED'] = env['LD']
        env['STRIP'] = '{}-strip --strip-unneeded'.format(command_prefix)
        env['MAKE'] = 'make -j5'
        env['READELF'] = '{}-readelf'.format(command_prefix)
        env['NM'] = '{}-nm'.format(command_prefix)

        #: frmdstryr: This seems to not be used anywhere, is it needed?
        try:
            hostpython_recipe = Recipe.get_recipe('hostpython2', self.ctx)

            # AND: This hardcodes python version 2.7, needs fixing
            env['BUILDLIB_PATH'] = join(
                hostpython_recipe.get_build_dir(self.arch), 'build',
                'lib.linux-{}-2.7'.format(uname()[-1]))
        except IOError:
            pass

        env['PATH'] = environ['PATH']

        env['ARCH'] = self.arch

        if self.ctx.python_recipe and self.ctx.python_recipe.from_crystax:
            env['CRYSTAX_PYTHON_VERSION'] = self.ctx.python_recipe.version

        return env
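
The `with_flags_in_cc` parameter only controls whether CFLAGS/CXXFLAGS are folded into the CC/CXX command strings themselves. A quick sketch of the resulting values, assuming `arch` is an instance of this class with an `arm-linux-androideabi` command prefix and ccache disabled (values illustrative):

env = arch.get_env(with_flags_in_cc=True)
# env['CC']  -> 'arm-linux-androideabi-gcc -DANDROID -mandroid '
#               '-fomit-frame-pointer --sysroot <ndk_platform>'
env = arch.get_env(with_flags_in_cc=False)
# env['CC']  -> 'arm-linux-androideabi-gcc'  (the flags stay only in env['CFLAGS'])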
Ejemplo n.º 56
0
    def test_run_distribute(
        self,
        mock_sh_cp,
        mock_sh_rm,
        mock_listdir,
        mock_chdir,
        mock_ensure_dir,
        mock_strip_libraries,
        mock_create_python_bundle,
        mock_open_dist_files,
        mock_open_sdl2_files,
        mock_open_webview_files,
        mock_open_service_only_files,
    ):
        """
        A test for any overridden method of
        `~pythonforandroid.bootstrap.Bootstrap.run_distribute`. Here we mock
        any file/dir operation that could slow down our tests; there is a lot
        to mock, because the `run_distribute` method has to prepare all the
        compiled files that go into the final `apk`. The targets of this test
        are:

            - :meth:`~pythonforandroid.bootstraps.sdl2.BootstrapSdl2
              .run_distribute`
            - :meth:`~pythonforandroid.bootstraps.service_only
              .ServiceOnlyBootstrap.run_distribute`
            - :meth:`~pythonforandroid.bootstraps.webview.WebViewBootstrap
               .run_distribute`
            - :meth:`~pythonforandroid.bootstraps.empty.EmptyBootstrap.
              run_distribute`

        Here we test the methods that are specific to each class.
        """
        # prepare bootstrap and distribution
        bs = Bootstrap().get_bootstrap(self.bootstrap_name, self.ctx)
        bs.build_dir = bs.get_build_dir()
        self.setUp_distribution_with_bootstrap(bs)

        self.ctx.hostpython = "/some/fake/hostpython3"
        self.ctx.python_recipe = Recipe.get_recipe("python3", self.ctx)
        self.ctx.python_modules = ["requests"]
        self.ctx.archs = [ArchARMv7_a(self.ctx)]

        bs.run_distribute()

        mock_open_dist_files.assert_called_once_with("dist_info.json", "w")
        mock_open_bootstraps = {
            "sdl2": mock_open_sdl2_files,
            "webview": mock_open_webview_files,
            "service_only": mock_open_service_only_files,
        }
        expected_open_calls = {
            "sdl2": [
                mock.call("local.properties", "w"),
                mock.call("blacklist.txt", "a"),
            ],
            "webview": [mock.call("local.properties", "w")],
            "service_only": [mock.call("local.properties", "w")],
        }
        mock_open_bs = mock_open_bootstraps[self.bootstrap_name]
        # test that the expected calls have been made
        for expected_call in expected_open_calls[self.bootstrap_name]:
            self.assertIn(expected_call, mock_open_bs.call_args_list)
        # test that the write function has been called with the expected args
        self.assertIn(
            mock.call().__enter__().write("sdk.dir=/opt/android/android-sdk"),
            mock_open_bs.mock_calls,
        )
        if self.bootstrap_name == "sdl2":
            self.assertIn(
                mock.call().__enter__().write(
                    "\nsqlite3/*\nlib-dynload/_sqlite3.so\n"),
                mock_open_bs.mock_calls,
            )

        # check that the other mocks we made are actually called
        mock_sh_rm.assert_called()
        mock_sh_cp.assert_called()
        mock_chdir.assert_called()
        mock_listdir.assert_called()
        mock_strip_libraries.assert_called()
        mock_create_python_bundle.assert_called()
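
The `mock.call().__enter__().write(...)` assertions above follow the standard `unittest.mock.mock_open` behaviour: a write performed inside a `with open(...)` block is recorded under `call().__enter__()`. A small, self-contained sketch of the same pattern (the `write_properties` helper is hypothetical, not from python-for-android):

from unittest import mock

def write_properties(path, sdk_dir):
    with open(path, 'w') as fileh:
        fileh.write('sdk.dir={}'.format(sdk_dir))

with mock.patch('builtins.open', mock.mock_open()) as mocked_open:
    write_properties('local.properties', '/opt/android/android-sdk')

mocked_open.assert_called_once_with('local.properties', 'w')
# the write inside the `with` block shows up as call().__enter__().write(...)
assert mock.call().__enter__().write(
    'sdk.dir=/opt/android/android-sdk') in mocked_open.mock_calls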
Ejemplo n.º 57
0
    def build_arch(self, arch):
        recipe_build_dir = self.get_build_dir(arch.arch)

        # Create a subdirectory to actually perform the build
        build_dir = join(recipe_build_dir, 'android-build')
        ensure_dir(build_dir)

        # TODO: Get these dynamically, like bpo-30386 does
        sys_prefix = '/usr/local'
        sys_exec_prefix = '/usr/local'

        # Skipping "Ensure that nl_langinfo is broken" from the original bpo-30386

        platform_name = 'android-{}'.format(self.ctx.ndk_api)

        with current_directory(build_dir):
            env = environ.copy()

            # TODO: Get this information from p4a's arch system
            android_host = 'arm-linux-androideabi'
            android_build = sh.Command(join(recipe_build_dir, 'config.guess'))().stdout.strip().decode('utf-8')
            platform_dir = join(self.ctx.ndk_dir, 'platforms', platform_name, 'arch-arm')
            toolchain = '{android_host}-4.9'.format(android_host=android_host)
            toolchain = join(self.ctx.ndk_dir, 'toolchains', toolchain, 'prebuilt', 'linux-x86_64')
            CC = '{clang} -target {target} -gcc-toolchain {toolchain}'.format(
                clang=join(self.ctx.ndk_dir, 'toolchains', 'llvm', 'prebuilt', 'linux-x86_64', 'bin', 'clang'),
                target='armv7-none-linux-androideabi',
                toolchain=toolchain)

            AR = join(toolchain, 'bin', android_host) + '-ar'
            LD = join(toolchain, 'bin', android_host) + '-ld'
            RANLIB = join(toolchain, 'bin', android_host) + '-ranlib'
            READELF = join(toolchain, 'bin', android_host) + '-readelf'
            STRIP = join(toolchain, 'bin', android_host) + '-strip --strip-debug --strip-unneeded'

            env['CC'] = CC
            env['AR'] = AR
            env['LD'] = LD
            env['RANLIB'] = RANLIB
            env['READELF'] = READELF
            env['STRIP'] = STRIP

            env['PATH'] = '{hostpython_dir}:{old_path}'.format(
                hostpython_dir=self.get_recipe('hostpython3', self.ctx).get_path_to_python(),
                old_path=env['PATH'])

            ndk_flags = ('--sysroot={ndk_sysroot} -D__ANDROID_API__={android_api} '
                         '-isystem {ndk_android_host}').format(
                             ndk_sysroot=join(self.ctx.ndk_dir, 'sysroot'),
                             android_api=self.ctx.ndk_api,
                             ndk_android_host=join(
                                 self.ctx.ndk_dir, 'sysroot', 'usr', 'include', android_host))
            sysroot = join(self.ctx.ndk_dir, 'platforms', platform_name, 'arch-arm')
            env['CFLAGS'] = env.get('CFLAGS', '') + ' ' + ndk_flags
            env['CPPFLAGS'] = env.get('CPPFLAGS', '') + ' ' + ndk_flags
            env['LDFLAGS'] = env.get('LDFLAGS', '') + ' --sysroot={} -L{}'.format(sysroot, join(sysroot, 'usr', 'lib'))

            if 'openssl' in self.ctx.recipe_build_order:
                recipe = Recipe.get_recipe('openssl', self.ctx)
                openssl_build_dir = recipe.get_build_dir(arch.arch)
                ensure_dir('Modules')
                setuplocal = join('Modules', 'Setup.local')
                shprint(sh.cp, join(self.get_recipe_dir(), 'Setup.local-ssl'), setuplocal)
                shprint(sh.sed, '-i.backup', 's#^SSL=.*#SSL={}#'.format(openssl_build_dir), setuplocal)
                env['OPENSSL_VERSION'] = recipe.lib_version

            if 'sqlite3' in self.ctx.recipe_build_order:
                # Include sqlite3 in python2 build
                recipe = Recipe.get_recipe('sqlite3', self.ctx)
                include = ' -I' + recipe.get_build_dir(arch.arch)
                lib = ' -L' + recipe.get_lib_dir(arch) + ' -lsqlite3'
                # Insert or append to env
                flag = 'CPPFLAGS'
                env[flag] = env[flag] + include if flag in env else include
                flag = 'LDFLAGS'
                env[flag] = env[flag] + lib if flag in env else lib

            # Manually add the libs directory, and copy some object
            # files to the current directory otherwise they aren't
            # picked up. This seems necessary because the --sysroot
            # setting in LDFLAGS is overridden by the other flags.
            # TODO: Work out why this doesn't happen in the original
            # bpo-30386 Makefile system.
            logger.warning('Doing some hacky stuff to link properly')
            lib_dir = join(sysroot, 'usr', 'lib')
            env['LDFLAGS'] += ' -L{}'.format(lib_dir)
            shprint(sh.cp, join(lib_dir, 'crtbegin_so.o'), './')
            shprint(sh.cp, join(lib_dir, 'crtend_so.o'), './')

            env['SYSROOT'] = sysroot

            if not exists('config.status'):
                shprint(sh.Command(join(recipe_build_dir, 'configure')),
                        '--host={}'.format(android_host),
                        '--build={}'.format(android_build),
                        '--enable-shared',
                        '--disable-ipv6',
                        'ac_cv_file__dev_ptmx=yes',
                        'ac_cv_file__dev_ptc=no',
                        '--without-ensurepip',
                        'ac_cv_little_endian_double=yes',
                        '--prefix={}'.format(sys_prefix),
                        '--exec-prefix={}'.format(sys_exec_prefix),
                        _env=env)

            if not exists('python'):
                shprint(sh.make, 'all', _env=env)

            # TODO: Look into passing the path to pyconfig.h in a
            # better way, although this is probably acceptable
            sh.cp('pyconfig.h', join(recipe_build_dir, 'Include'))
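
The `sed -i.backup 's#^SSL=.*#SSL=...#'` call in the openssl block above rewrites the `SSL=` line of Modules/Setup.local in place so the `_ssl` module links against the recipe's own OpenSSL build. For readers not fluent in sed, a plain-Python equivalent of that substitution — a sketch assuming the same file layout, not the code python-for-android actually runs:

import re

def point_setup_local_at_openssl(setuplocal, openssl_build_dir):
    # replace the line starting with 'SSL=' so _ssl is built against our
    # own OpenSSL build directory
    with open(setuplocal) as fileh:
        content = fileh.read()
    content = re.sub(r'^SSL=.*$', 'SSL={}'.format(openssl_build_dir),
                     content, flags=re.MULTILINE)
    with open(setuplocal, 'w') as fileh:
        fileh.write(content)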