def run_project_cmd(self, args, loader, manifest):
    """Install the system packages required by *manifest*.

    With ``--recursive`` the required packages of every dependency are
    merged in as well.  The host's package manager ("rpm" or "deb")
    decides which package list is installed; any other manager is
    reported as unsupported.

    Fix: look packages up with ``.get(kind, [])`` — previously
    ``all_packages["rpm"]`` / ``all_packages["deb"]`` raised KeyError
    whenever no project declared packages for that manager.  Also drops
    an unused ``cache`` local.
    """
    if args.recursive:
        projects = loader.manifests_in_dependency_order()
    else:
        projects = [manifest]

    # Merge the per-manifest package lists, keyed by package-manager
    # kind (e.g. "rpm", "deb").
    all_packages = {}
    for m in projects:
        ctx = loader.ctx_gen.get_context(m.name)
        packages = m.get_required_system_packages(ctx)
        for kind, pkgs in packages.items():
            all_packages.setdefault(kind, []).extend(pkgs)

    manager = loader.build_opts.host_type.get_package_manager()
    if manager == "rpm":
        # .get() so that a project set with no rpm packages is a no-op
        # rather than a KeyError
        packages = sorted(set(all_packages.get("rpm", [])))
        if packages:
            run_cmd(["dnf", "install", "-y"] + packages)
    elif manager == "deb":
        packages = sorted(set(all_packages.get("deb", [])))
        if packages:
            run_cmd(["apt", "install", "-y"] + packages)
    else:
        print("I don't know how to install any packages on this system")
def run_project_cmd(self, args, loader, manifest):
    """Fetch sources for *manifest* (and, with --recursive, its deps).

    A project is skipped when the artifact cache can satisfy it, or when
    the on-disk build marker records the same project hash as the
    current inputs (nothing changed since the last build).
    """
    projects = (
        loader.manifests_in_dependency_order() if args.recursive else [manifest]
    )
    cache = cache_module.create_cache()

    for project in projects:
        if CachedProject(cache, loader, project).download():
            # Fully satisfied from the artifact cache; nothing to fetch.
            continue

        install_dir = loader.get_project_install_dir(project)
        marker_path = os.path.join(install_dir, ".built-by-getdeps")
        if os.path.exists(marker_path):
            with open(marker_path, "r") as marker:
                recorded_hash = marker.read().strip()
            if recorded_hash == loader.get_project_hash(project):
                # Already built from these exact inputs; skip the fetch.
                continue

        # We need to fetch the sources
        loader.create_fetcher(project).update()
def run_project_cmd(self, args, loader, manifest):
    """Build *manifest* and, unless --no-deps was given, its dependencies.

    Projects are walked in topological dependency order; each project's
    install directory is collected so later builds can locate their deps.
    A project is rebuilt only when its sources or configuration changed,
    or when no build marker exists yet.
    """
    if args.clean:
        clean_dirs(loader.build_opts)

    print("Building on %s" % loader.ctx_gen.get_context(args.project))
    projects = loader.manifests_in_dependency_order()
    cache = cache_module.create_cache() if args.use_build_cache else None

    # Accumulate the install directories so that the build steps
    # can find their dep installation
    install_dirs = []

    for project in projects:
        fetcher = loader.create_fetcher(project)
        if args.clean:
            fetcher.clean()

        build_dir = loader.get_project_build_dir(project)
        inst_dir = loader.get_project_install_dir(project)

        if project == manifest or not args.no_deps:
            print("Assessing %s..." % project.name)
            project_hash = loader.get_project_hash(project)
            ctx = loader.ctx_gen.get_context(project.name)
            built_marker = os.path.join(inst_dir, ".built-by-getdeps")
            cached_project = CachedProject(cache, loader, project)

            reconfigure, sources_changed = self.compute_source_change_status(
                cached_project, fetcher, project, built_marker, project_hash
            )

            needs_build = (
                sources_changed or reconfigure or not os.path.exists(built_marker)
            )
            if needs_build:
                if os.path.exists(built_marker):
                    os.unlink(built_marker)
                src_dir = fetcher.get_src_dir()
                builder = project.create_builder(
                    loader.build_opts,
                    src_dir,
                    build_dir,
                    inst_dir,
                    ctx,
                    loader,
                    final_install_prefix=loader.get_project_install_prefix(
                        project
                    ),
                )
                builder.build(install_dirs, reconfigure=reconfigure)
                # Record the hash of the inputs that produced this build
                # so an unchanged project can be skipped next time.
                with open(built_marker, "w") as marker:
                    marker.write(project_hash)
                # Only populate the cache from continuous build runs
                if args.schedule_type == "continuous":
                    cached_project.upload()

        # Even when a project was skipped, downstream builds still need
        # to know where it is installed.
        install_dirs.append(inst_dir)
def run_project_cmd(self, args, loader, manifest):
    """Build *manifest*, honouring --only-deps / --no-deps.

    Projects entirely satisfied by system packages are skipped outright.
    A previously built, non-cached project is additionally re-checked
    against its dependencies so that a changed dep can force a
    reconfigure or rebuild.
    """
    if args.clean:
        clean_dirs(loader.build_opts)

    print("Building on %s" % loader.ctx_gen.get_context(args.project))
    projects = loader.manifests_in_dependency_order()
    cache = cache_module.create_cache() if args.use_build_cache else None

    # Accumulate the install directories so that the build steps
    # can find their dep installation
    install_dirs = []

    for project in projects:
        fetcher = loader.create_fetcher(project)
        if isinstance(fetcher, SystemPackageFetcher):
            # We are guaranteed that if the fetcher is set to
            # SystemPackageFetcher then this item is completely
            # satisfied by the appropriate system packages
            continue
        if args.clean:
            fetcher.clean()

        build_dir = loader.get_project_build_dir(project)
        inst_dir = loader.get_project_install_dir(project)

        # The target project honours --only-deps; every dependency
        # honours --no-deps.
        if project == manifest:
            should_assess = not args.only_deps
        else:
            should_assess = not args.no_deps

        if should_assess:
            print("Assessing %s..." % project.name)
            project_hash = loader.get_project_hash(project)
            ctx = loader.ctx_gen.get_context(project.name)
            built_marker = os.path.join(inst_dir, ".built-by-getdeps")
            cached_project = CachedProject(cache, loader, project)

            reconfigure, sources_changed = self.compute_source_change_status(
                cached_project, fetcher, project, built_marker, project_hash
            )

            if os.path.exists(built_marker) and not cached_project.was_cached():
                # Built locally before: a changed dependency may still
                # force a reconfigure or rebuild, so check them too.
                dep_reconfigure, dep_build = self.compute_dep_change_status(
                    project, built_marker, loader
                )
                if dep_reconfigure:
                    reconfigure = True
                if dep_build:
                    sources_changed = True

            if sources_changed or reconfigure or not os.path.exists(built_marker):
                if os.path.exists(built_marker):
                    os.unlink(built_marker)
                src_dir = fetcher.get_src_dir()
                builder = project.create_builder(
                    loader.build_opts,
                    src_dir,
                    build_dir,
                    inst_dir,
                    ctx,
                    loader,
                    final_install_prefix=loader.get_project_install_prefix(
                        project
                    ),
                )
                builder.build(install_dirs, reconfigure=reconfigure)
                # Record the input hash so this build can be skipped
                # next time if nothing changes.
                with open(built_marker, "w") as marker:
                    marker.write(project_hash)
                # Only populate the cache from continuous build runs
                if args.schedule_type == "continuous":
                    cached_project.upload()

        # Even when a project was skipped, downstream builds still need
        # to know where it is installed.
        install_dirs.append(inst_dir)
def run_project_cmd(self, args, loader, manifest):
    """Build *manifest*, honouring --only-deps / --no-deps.

    Extends the basic build flow with user-supplied extra CMake defines
    and b2 args, optional "prepare" builders that write configuration
    before the main build, and a verbose note for up-to-date projects.
    """
    if args.clean:
        clean_dirs(loader.build_opts)

    print("Building on %s" % loader.ctx_gen.get_context(args.project))
    projects = loader.manifests_in_dependency_order()
    cache = cache_module.create_cache() if args.use_build_cache else None

    # Accumulate the install directories so that the build steps
    # can find their dep installation
    install_dirs = []

    for project in projects:
        fetcher = loader.create_fetcher(project)
        if isinstance(fetcher, SystemPackageFetcher):
            # We are guaranteed that if the fetcher is set to
            # SystemPackageFetcher then this item is completely
            # satisfied by the appropriate system packages
            continue
        if args.clean:
            fetcher.clean()

        build_dir = loader.get_project_build_dir(project)
        inst_dir = loader.get_project_install_dir(project)

        # The target project honours --only-deps; every dependency
        # honours --no-deps.
        if project == manifest:
            should_assess = not args.only_deps
        else:
            should_assess = not args.no_deps

        if should_assess:
            print("Assessing %s..." % project.name)
            project_hash = loader.get_project_hash(project)
            ctx = loader.ctx_gen.get_context(project.name)
            built_marker = os.path.join(inst_dir, ".built-by-getdeps")
            cached_project = CachedProject(cache, loader, project)

            reconfigure, sources_changed = self.compute_source_change_status(
                cached_project, fetcher, project, built_marker, project_hash
            )

            if os.path.exists(built_marker) and not cached_project.was_cached():
                # Built locally before: a changed dependency may still
                # force a reconfigure or rebuild, so check them too.
                dep_reconfigure, dep_build = self.compute_dep_change_status(
                    project, built_marker, loader
                )
                if dep_reconfigure:
                    reconfigure = True
                if dep_build:
                    sources_changed = True

            # Extra build knobs supplied on the command line; the cmake
            # defines arrive as a JSON object string.
            extra_cmake_defines = (
                json.loads(args.extra_cmake_defines)
                if args.extra_cmake_defines
                else {}
            )
            extra_b2_args = args.extra_b2_args or []

            if sources_changed or reconfigure or not os.path.exists(built_marker):
                if os.path.exists(built_marker):
                    os.unlink(built_marker)
                src_dir = fetcher.get_src_dir()

                # Prepare builders write out config before the main builder runs
                prepare_builders = project.create_prepare_builders(
                    loader.build_opts,
                    ctx,
                    src_dir,
                    build_dir,
                    inst_dir,
                    loader,
                )
                for preparer in prepare_builders:
                    preparer.prepare(install_dirs, reconfigure=reconfigure)

                builder = project.create_builder(
                    loader.build_opts,
                    src_dir,
                    build_dir,
                    inst_dir,
                    ctx,
                    loader,
                    final_install_prefix=loader.get_project_install_prefix(
                        project
                    ),
                    extra_cmake_defines=extra_cmake_defines,
                    extra_b2_args=extra_b2_args,
                )
                builder.build(install_dirs, reconfigure=reconfigure)
                # Record the input hash so this build can be skipped
                # next time if nothing changes.
                with open(built_marker, "w") as marker:
                    marker.write(project_hash)
                # Only populate the cache from continuous build runs
                if args.schedule_type == "continuous":
                    cached_project.upload()
            elif args.verbose:
                print("found good %s" % built_marker)

        # Paths are resolved from front. We prepend rather than append as
        # the last project in topo order is the project itself, which
        # should be first in the path, then its deps and so on.
        install_dirs.insert(0, inst_dir)