def load_defaults(self):
    self.set_property('cache_file', None)
    self.set_property('home_dir', self._default_home_dir())
    self.set_property('prefix', None)
    self.set_property('sources', None)
    self.set_property('local_sources', None)
    self.set_property('cached_sources', self._relative_path('sources'))
    self.set_property('git_root', DEFAULT_GIT_ROOT)
    self.set_property('allow_parallel_build', DEFAULT_ALLOW_PARALLEL_BUILD)
    self.set_property('host', None)
    self.set_property('build', None)
    self.set_property('target', None)
    platform, arch, distro, distro_version, num_of_cpus = system_info()
    self.set_property('platform', platform)
    self.set_property('num_of_cpus', num_of_cpus)
    self.set_property('target_platform', platform)
    self.set_property('arch', arch)
    self.set_property('target_arch', arch)
    self.set_property('distro', distro)
    self.set_property('target_distro', distro)
    self.set_property('distro_version', distro_version)
    self.set_property('target_distro_version', distro_version)
    self.set_property('packages_prefix', None)
    self.set_property('packager', DEFAULT_PACKAGER)
    self.set_property('package_tarball_compression', 'bz2')
    stdlibpath = sysconfig.get_path('stdlib', vars={'installed_base': ''})[1:]
    # Ensure that the path uses / as path separator and not \
    self.set_property('py_prefix', PurePath(stdlibpath).as_posix())
    self.set_property('lib_suffix', '')
    self.set_property('data_dir', self._find_data_dir())
    self.set_property('environ_dir', self._relative_path('config'))
    self.set_property('recipes_dir', self._relative_path('recipes'))
    self.set_property('packages_dir', self._relative_path('packages'))
    self.set_property('allow_system_libs', True)
    self.set_property('use_configure_cache', False)
    self.set_property('external_recipes', {})
    self.set_property('external_packages', {})
    self.set_property('universal_archs', None)
    self.set_property('variants', [])
    self.set_property('build_tools_prefix', None)
    self.set_property('build_tools_sources', None)
    self.set_property('build_tools_cache', None)
    self.set_property('recipes_commits', {})
    self.set_property('recipes_remotes', {})
    self.set_property('extra_build_tools', [])
    self.set_property('distro_packages_install', True)
    self.set_property('interactive', m.console_is_interactive())
    self.set_property('meson_cross_properties', {})
    self.set_property('manifest', None)
    self.set_property('extra_properties', {})
    self.set_property('extra_mirrors', [])
    self.set_property('extra_bootstrap_packages', {})
    self.set_property('bash_completions', set())
    # Increase open-files limits
    set_nofile_ulimit()
def _create_framework_library(self, libraries):
    """Merge static libraries into a single framework library.

    Each (possibly fat) archive is split per architecture, its object files
    are de-duplicated by md5 and size, re-archived with ar, and the per-arch
    results are lipo-merged into self.install_name.
    """
    tmpdir = tempfile.mkdtemp()

    libname = os.path.basename(self.libname)  # just to make sure

    if self.arch == Architecture.UNIVERSAL:
        archs = self.universal_archs
    else:
        archs = [self.arch]

    archs = [a if a != Architecture.X86 else 'i386' for a in archs]

    split_queue = asyncio.Queue()
    join_queues = collections.defaultdict(asyncio.Queue)
    for thin_arch in archs:
        shell.call('mkdir -p %s' % thin_arch, tmpdir, env=self.env)

    status = BuildStatusPrinter(archs, m.console_is_interactive())
    for lib in libraries:
        for thin_arch in archs:
            split_queue.put_nowait((lib, thin_arch))
            status.arch_total[thin_arch] += 1

    async def split_library_worker():
        while True:
            lib, thin_arch = await split_queue.get()

            tmpdir_thinarch = os.path.join(tmpdir, thin_arch)
            libprefix = os.path.split(lib)[-1].replace('.', '_')

            if len(archs) > 1:  # should be a fat file, split only to the arch we want
                libprefix += '_%s_' % thin_arch
                lib_tmpdir = await self._split_static_lib(lib, thin_arch)
            else:
                lib_tmpdir = await self._split_static_lib(lib)

            if lib_tmpdir is None:
                # arch is not supported in the static lib, skip it
                status.inc_arch(thin_arch)
                split_queue.task_done()
                continue

            obj_files = shell.ls_files(['*.o'], lib_tmpdir)
            obj_dict = {}
            for obj_f in obj_files:
                obj_path = os.path.join(lib_tmpdir, obj_f)
                md5 = (await shell.async_call_output(['md5', '-q', obj_path], env=self.env)).split('\n')[0]
                md5 = '%s-%s' % (md5, os.path.getsize(obj_path))
                obj_dict[obj_f] = md5

            join_queues[thin_arch].put_nowait((lib, lib_tmpdir, obj_dict))
            split_queue.task_done()

    async def join_library_worker(q, thin_arch):
        object_files_md5 = []
        while True:
            lib, lib_tmpdir, obj_dict = await q.get()
            status.inc_arch(thin_arch)

            tmpdir_thinarch = os.path.join(tmpdir, thin_arch)
            libprefix = os.path.split(lib)[-1].replace('.', '_')

            target_objs = []
            for obj_f, md5 in obj_dict.items():
                obj_path = os.path.join(lib_tmpdir, obj_f)
                if md5 not in object_files_md5:
                    target_name = '%s-%s' % (libprefix, obj_f)
                    try:
                        # Hard link source file to the target name
                        os.link(obj_path, tmpdir_thinarch + '/' + target_name)
                    except:
                        # Fall back to cp if hard link doesn't work for any reason
                        await shell.async_call(['cp', obj_path, target_name], tmpdir_thinarch, env=self.env)

                    # If we have a duplicate object, commit any collected ones
                    if target_name in target_objs:
                        m.warning("Committing %d objects due to dup %s" % (len(target_objs), target_name))
                        await shell.async_call(['ar', '-cqS', libname] + target_objs, tmpdir_thinarch, env=self.env)
                        target_objs = []

                    target_objs.append(target_name)
                    object_files_md5.append(md5)

            # Put all the collected target_objs in the archive. cmdline limit is 262k args on OSX.
            if len(target_objs):
                await shell.async_call(['ar', '-cqS', libname] + target_objs, tmpdir_thinarch, env=self.env)

            shutil.rmtree(lib_tmpdir)
            q.task_done()

    async def post_join_worker(thin_arch):
        tmpdir_thinarch = os.path.join(tmpdir, thin_arch)
        await shell.async_call(['ar', '-s', libname], tmpdir_thinarch, env=self.env)
        lib = os.path.join(tmpdir, thin_arch, libname)
        await self._check_duplicated_symbols(lib, tmpdir)

    async def split_join_task():
        tasks = [asyncio.ensure_future(join_library_worker(join_queues[arch], arch)) for arch in archs]
        tasks += [asyncio.ensure_future(split_library_worker()) for _ in range(len(archs))]

        async def split_join_queues_done():
            await split_queue.join()
            for arch in archs:
                await join_queues[arch].join()

        await run_tasks(tasks, split_join_queues_done())

        tasks = [asyncio.ensure_future(post_join_worker(thin_arch)) for thin_arch in archs]
        await run_tasks(tasks)

    run_until_complete(split_join_task())

    if len(archs) > 1:
        # merge the final libs into a fat file again
        files = [os.path.join(tmpdir, arch, libname) for arch in archs]
        shell.new_call(['lipo'] + files + ['-create', '-output', self.install_name], tmpdir, env=self.env)
    else:
        shell.new_call(['cp', os.path.join(tmpdir, self.arch, libname), self.install_name], tmpdir, env=self.env)
    shutil.rmtree(tmpdir)
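
# A minimal standalone sketch (not part of the original file) of what a helper like
# self._split_static_lib() above is assumed to do on macOS: pull one architecture out
# of a (possibly fat) static archive with `lipo -thin` and unpack its object files
# with `ar -x`. The function name and error handling here are hypothetical and
# simplified; the real helper also detects archives that do not contain the requested
# architecture and returns None for them.
import os
import shutil
import subprocess
import tempfile


def split_static_lib_sketch(lib, thin_arch=None):
    """Return a temporary directory containing the .o members of `lib`."""
    tmpdir = tempfile.mkdtemp()
    thin_lib = os.path.join(tmpdir, os.path.basename(lib))
    if thin_arch is not None:
        # Extract a single architecture slice from a fat archive
        subprocess.run(['lipo', lib, '-thin', thin_arch, '-output', thin_lib], check=True)
    else:
        # Archive is already thin; just operate on a copy
        shutil.copy(lib, thin_lib)
    # Unpack the archive members (.o files) into the temporary directory
    subprocess.run(['ar', '-x', os.path.basename(thin_lib)], cwd=tmpdir, check=True)
    return tmpdir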