def run(self, config, options, args, help=None):
    """Drive an unattended 'autobuild' run and upload the report.

    Raises UsageError when no report URL is configured, FatalError when
    the --start-at module is not in the computed module list.
    """
    config.set_from_cmdline_options(options)
    config.buildscript = 'autobuild'

    # autobuild runs are non-interactive and quiet unless told otherwise
    config.autobuild_report_url = None
    config.verbose = False
    config.interact = False
    if options.reporturl is not None:
        config.autobuild_report_url = options.reporturl
    if options.verbose:
        config.verbose = True
    if not config.autobuild_report_url:
        raise UsageError(_('report url for autobuild not specified'))

    module_set = jhbuild.moduleset.load(config)
    to_build = module_set.get_module_list(args or config.modules, config.skip)

    # honour --start-at: drop everything before the named module
    if options.startat:
        while to_build and to_build[0].name != options.startat:
            to_build.pop(0)
        if not to_build:
            raise FatalError(_('%s not in module list') % options.startat)

    return jhbuild.frontends.get_buildscript(config, to_build).build()
def run(self, config, options, args, help=None):
    """Run a tinderbox build, optionally verifying system dependencies first.

    Raises UsageError when no output directory is configured, FatalError
    when --start-at names an unknown module or required system
    dependencies are missing.
    """
    config.set_from_cmdline_options(options)
    config.buildscript = 'tinderbox'
    if options.outputdir is not None:
        config.tinderbox_outputdir = options.outputdir
    if not config.tinderbox_outputdir:
        raise UsageError(_('output directory for tinderbox build not specified'))

    module_set = jhbuild.moduleset.load(config)
    full_module_list = module_set.get_full_module_list(
        args or config.modules, config.skip)
    module_list = module_set.remove_system_modules(full_module_list)

    # remove modules up to startat
    if options.startat:
        while module_list and module_list[0].name != options.startat:
            del module_list[0]
        if not module_list:
            raise FatalError(_('%s not in module list') % options.startat)

    if config.check_sysdeps:
        module_state = module_set.get_module_state(full_module_list)
        if not self.required_system_dependencies_installed(module_state):
            self.print_system_dependencies(module_state)
            # Bug fix: interpolate *after* translating.  The original
            # applied '%' to the msgid before calling _(), so the
            # formatted string could never match a gettext catalog entry.
            raise FatalError(_('Required system dependencies not installed.'
                               ' Install using the command %(cmd)s or to '
                               'ignore system dependencies use command-line'
                               ' option %(opt)s')
                             % {'cmd': "'jhbuild sysdeps --install'",
                                'opt': '--nodeps'})

    build = jhbuild.frontends.get_buildscript(config, module_list,
                                              module_set=module_set)
    return build.build()
def run(self, config, options, args, help=None):
    """Print the (dependency-ordered) list of modules that would be built.

    With --show-rev each module is printed as 'name (revision)' when a
    revision is available.  Raises UsageError on conflicting options,
    FatalError when --start-at names an unknown module.
    """
    config.set_from_cmdline_options(options)
    module_set = jhbuild.moduleset.load(config)

    if options.startat and options.list_all_modules:
        raise UsageError(_('Conflicting options specified (\'--start-at\' and \'--all-modules\')'))

    if options.list_all_modules:
        module_list = module_set.modules.values()
    else:
        module_list = module_set.get_module_list(
            args or config.modules, config.skip, tags=config.tags,
            include_suggests=not config.ignore_suggests,
            include_afters=options.list_optional_modules)

    # honour --start-at: drop everything before the named module
    if options.startat:
        while module_list and module_list[0].name != options.startat:
            del module_list[0]
        if not module_list:
            raise FatalError(_('%s not in module list') % options.startat)

    for mod in module_list:
        # only ask the VCS for a revision when --show-rev was given
        rev = mod.get_revision() if options.show_rev else None
        if rev:
            uprint('%s (%s)' % (mod.name, rev))
        else:
            uprint(mod.name)
def run(self, config, options, args, help=None):
    """Run a plain tinderbox build of the selected modules.

    Raises UsageError when no output directory is configured, FatalError
    when the --start-at module is not in the module list.
    """
    config.set_from_cmdline_options(options)
    config.buildscript = 'tinderbox'
    if options.outputdir is not None:
        config.tinderbox_outputdir = options.outputdir
    if not config.tinderbox_outputdir:
        raise UsageError(
            _('output directory for tinderbox build not specified'))

    module_set = jhbuild.moduleset.load(config)
    to_build = module_set.get_module_list(args or config.modules,
                                          config.skip)

    # honour --start-at: drop everything before the named module
    if options.startat:
        while to_build and to_build[0].name != options.startat:
            to_build.pop(0)
        if not to_build:
            raise FatalError(_('%s not in module list') % options.startat)

    return jhbuild.frontends.get_buildscript(
        config, to_build, module_set=module_set).build()
def dep_resolve(node, resolved, seen, after):
    '''
    Recursive depth-first search of the dependency tree. Creates the
    build order into the list 'resolved'. <after/> modules are added
    to the dependency tree but flagged. When search finished <after/>
    modules not a real dependency are removed.
    '''
    # NOTE(review): relies on enclosing-scope names (self,
    # include_suggests, skip, warn_about_circular_dependencies).
    circular = False
    seen.append(node)
    if include_suggests:
        edge_names = node.dependencies + node.suggests + node.after
    else:
        edge_names = node.dependencies + node.after
    # do not include <after> modules because a previous visited <after>
    # module may later be a hard dependency
    hard_resolved = [mod for mod, flagged_after in resolved
                     if not flagged_after]
    for edge_name in edge_names:
        dep = self.modules.get(edge_name)
        if dep is None:
            # unknown module: warn once (only before node itself resolves)
            if node not in [entry[0] for entry in resolved]:
                self._warn(_('%(module)s has a dependency on unknown'
                             ' "%(invalid)s" module')
                           % {'module': node.name,
                              'invalid': edge_name})
            continue
        if edge_name in skip or dep in hard_resolved:
            continue
        if dep in seen:
            # circular dependency detected
            circular = True
            if self.raise_exception_on_warning:
                # Translation of string not required - used in
                # unit tests only
                raise UsageError('Circular dependencies detected')
            if warn_about_circular_dependencies:
                self._warn(_('Circular dependencies detected: %s')
                           % ' -> '.join([entry.name for entry in seen]
                                         + [dep.name]))
            break
        if edge_name in node.after:
            dep_resolve(dep, resolved, seen, True)
        elif edge_name in node.suggests:
            dep_resolve(dep, resolved, seen, after)
        elif edge_name in node.dependencies:
            dep_resolve(dep, resolved, seen, after)
            # hard dependency may be missed if a cyclic
            # dependency. Add it:
            if dep not in [entry[0] for entry in resolved]:
                resolved.append((dep, after))
    seen.remove(node)
    if not circular:
        if node not in [entry[0] for entry in resolved]:
            resolved.append((node, after))
        elif not after:
            # a dependency exists for an after, flag to keep
            for index, entry in enumerate(resolved):
                if entry[1] is True and entry[0] == node:
                    resolved[index] = (node, False)
def get_module_list(self, seed, skip=[], tags=[], ignore_cycles=False,
                    ignore_suggests=False, include_optional_modules=False,
                    ignore_missing=False, process_sysdeps=True):
    '''gets a list of module objects (in correct dependency order) needed
    to build the modules in the seed list'''
    # NOTE(review): this looks like a truncated duplicate of
    # ModuleSet.get_module_list — confirm against the full definition.
    if seed == 'all':
        seed = self.modules.keys()
    try:
        all_modules = [self.get_module(mod, ignore_case=True)
                       for mod in seed if mod not in skip]
    except KeyError as e:
        # Fix: 'except ... as' instead of the legacy Python-2-only comma
        # syntax, consistent with the rest of the file.
        raise UsageError(_('module "%s" not found') % e)
def run(self, config, options, args, help=None):
    """Sanity-check the build environment: directory permissions,
    required tool versions, and visibility of aclocal macros.

    Problems are reported via uprint(); nothing is raised except
    UsageError for unexpected extra arguments.
    """
    if args:
        raise UsageError(_('no extra arguments expected'))

    # check whether the checkout root and install prefix are writable
    if not (os.path.isdir(config.checkoutroot) and
            os.access(config.checkoutroot, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('checkout root (%s) is not writable') % config.checkoutroot)
    if not (os.path.isdir(config.prefix) and
            os.access(config.prefix, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('install prefix (%s) is not writable') % config.prefix)

    # check whether various tools are installed
    if not check_version(['libtoolize', '--version'],
                         r'libtoolize \([^)]*\) ([\d.]+)', '1.5'):
        uprint(_('%s not found') % 'libtool >= 1.5')
    if not check_version(['gettext', '--version'],
                         r'gettext \([^)]*\) ([\d.]+)', '0.10.40'):
        uprint(_('%s not found') % 'gettext >= 0.10.40')
    if not check_version(['pkg-config', '--version'],
                         r'^([\d.]+)', '0.14.0'):
        uprint(_('%s not found') % 'pkg-config >= 0.14.0')
    if not check_version(['autoconf', '--version'],
                         r'autoconf \([^)]*\) ([\d.]+)', '2.53'):
        uprint(_('%s not found') % 'autoconf >= 2.53')
    if not check_version(['automake', '--version'],
                         r'automake \([^)]*\) ([\d.]+)', '1.10'):
        uprint(_('%s not found') % 'automake >= 1.10')

    try:
        not_in_path = []
        path = get_aclocal_path()
        macros = ['libtool.m4', 'gettext.m4', 'pkg.m4']
        for macro in macros:
            if not inpath(macro, path):
                uprint(_("aclocal can't see %s macros")
                       % (macro.split('.m4')[0]))
                # Idiom fix: membership test instead of
                # 'not_in_path.count(macro) == 0'.
                if macro not in not_in_path:
                    not_in_path.append(macro)
        if not_in_path:
            # i18n fix: translate the msgid first, then interpolate; the
            # original applied '%' inside _() which defeats catalog lookup.
            uprint(_("Please copy the lacking macros (%s) in one of the following paths: %s")
                   % (', '.join(not_in_path), ', '.join(path)))
    except CommandError as exc:
        # Fix: 'except ... as' instead of Python-2-only comma syntax.
        uprint(str(exc))
def _warn(self, msg):
    """Report a warning: escalate to UsageError in strict (unit-test)
    mode, otherwise emit it through the logging module."""
    if not self.raise_exception_on_warning:
        logging.warning(msg)
        return
    raise UsageError(msg)
def get_full_module_list(self, module_names='all', skip=[],
                         include_suggests=True, include_afters=False,
                         warn_about_circular_dependencies=True):
    """Resolve *module_names* into a dependency-ordered list of module
    objects via depth-first search; <after/> modules are kept only when
    requested or when they turn out to be real dependencies."""

    def dep_resolve(node, resolved, seen, after):
        '''
        Recursive depth-first search of the dependency tree. Creates the
        build order into the list 'resolved'. <after/> modules are added
        to the dependency tree but flagged. When search finished <after/>
        modules not a real dependency are removed.
        '''
        circular = False
        seen.append(node)
        if include_suggests:
            edges = node.dependencies + node.suggests + node.after
        else:
            edges = node.dependencies + node.after
        # do not include <after> modules because a previous visited <after>
        # module may later be a hard dependency
        resolved_deps = [module for module, after_module in resolved
                         if not after_module]
        for edge_name in edges:
            edge = self.modules.get(edge_name)
            if edge is None:
                if node not in [i[0] for i in resolved]:
                    self._warn(_('%(module)s has a dependency on unknown'
                                 ' "%(invalid)s" module')
                               % {'module': node.name,
                                  'invalid': edge_name})
            elif edge_name not in skip and edge not in resolved_deps:
                if edge in seen:
                    # circular dependency detected
                    circular = True
                    if self.raise_exception_on_warning:
                        # Translation of string not required - used in
                        # unit tests only
                        raise UsageError('Circular dependencies detected')
                    if warn_about_circular_dependencies:
                        self._warn(_('Circular dependencies detected: %s')
                                   % ' -> '.join([i.name for i in seen]
                                                 + [edge.name]))
                    break
                else:
                    if edge_name in node.after:
                        dep_resolve(edge, resolved, seen, True)
                    elif edge_name in node.suggests:
                        dep_resolve(edge, resolved, seen, after)
                    elif edge_name in node.dependencies:
                        dep_resolve(edge, resolved, seen, after)
                        # hard dependency may be missed if a cyclic
                        # dependency. Add it:
                        if edge not in [i[0] for i in resolved]:
                            resolved.append((edge, after))
        seen.remove(node)
        if not circular:
            if node not in [i[0] for i in resolved]:
                resolved.append((node, after))
            elif not after:
                # a dependency exists for an after, flag to keep
                for index, item in enumerate(resolved):
                    if item[1] is True and item[0] == node:
                        resolved[index] = (node, False)

    if module_names == 'all':
        module_names = self.modules.keys()
    try:
        # remove skip modules from module_name list
        modules = [self.get_module(module, ignore_case=True)
                   for module in module_names if module not in skip]
    except KeyError as e:
        raise UsageError(_("A module called '%s' could not be found.") % e)

    resolved = []
    for module in modules:
        dep_resolve(module, resolved, [], False)

    if include_afters:
        module_list = [module[0] for module in resolved]
    else:
        # drop modules that were only reached through <after/> edges
        module_list = [module for module, after_module in resolved
                       if not after_module]
    if '*' in skip:
        module_list = [module for module in module_list
                       if module.name in self.config.modules]
    return module_list
def run(self, config, options, args, help=None):
    """Full environment sanity check: create/verify jhbuild directories,
    required tool versions, XML catalog entries, Perl XML::Parser, a
    download utility, git, flex/bison and xzcat.

    Problems are reported via uprint(); only unexpected extra arguments
    raise (UsageError).
    """
    if args:
        raise UsageError(_('no extra arguments expected'))

    # try creating jhbuild directories before checking they are accessible.
    try:
        os.makedirs(config.checkoutroot)
        os.makedirs(config.prefix)
    except OSError:
        pass

    # check whether the checkout root and install prefix are writable
    if not (os.path.isdir(config.checkoutroot) and
            os.access(config.checkoutroot, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('checkout root (%s) is not writable') % config.checkoutroot)
    if not (os.path.isdir(config.prefix) and
            os.access(config.prefix, os.R_OK | os.W_OK | os.X_OK)):
        uprint(_('install prefix (%s) is not writable') % config.prefix)

    autoconf = True

    # check whether various tools are installed
    if not check_version(['libtoolize', '--version'],
                         r'libtoolize \([^)]*\) ([\d.]+)', '1.5'):
        uprint(_('%s not found') % 'libtool >= 1.5')
    if not check_version(['gettext', '--version'],
                         r'gettext \([^)]*\) ([\d.]+)', '0.10.40'):
        uprint(_('%s not found') % 'gettext >= 0.10.40')
    if not check_version(['pkg-config', '--version'],
                         r'^([\d.]+)', '0.14.0'):
        uprint(_('%s not found') % 'pkg-config >= 0.14.0')
    if not check_version(['autoconf', '--version'],
                         r'autoconf \([^)]*\) ([\d.]+)', '2.53'):
        autoconf = False
        uprint(_('%s not found') % 'autoconf >= 2.53')
    if not check_version(['automake', '--version'],
                         r'automake \([^)]*\) ([\d.]+)', '1.10'):
        uprint(_('%s not found') % 'automake >= 1.10')
    if autoconf:
        # aclocal macro visibility only makes sense with a working autoconf
        self.check_m4()

    # XML catalog sanity checks
    # Fix: 'except Exception' instead of a bare 'except:', which also
    # swallowed KeyboardInterrupt/SystemExit.
    xmlcatalog = True
    try:
        get_output(['which', 'xmlcatalog'])
    except Exception:
        xmlcatalog = False
        uprint(_('Could not find XML catalog (usually part of the package \'libxml2-utils\')'))

    if xmlcatalog:
        for item, name in [
                ('-//OASIS//DTD DocBook XML V4.1.2//EN',
                 'DocBook XML DTD V4.1.2'),
                ('http://docbook.sourceforge.net/release/xsl/current/html/chunk.xsl',
                 'DocBook XSL Stylesheets')]:
            try:
                # Fix: result was assigned to an unused 'data' local;
                # only the success/failure of the lookup matters.
                get_output(['xmlcatalog', '/etc/xml/catalog', item])
            except Exception:
                uprint(_('Could not find %s in XML catalog (usually part of package \'docbook-xsl\')') % name)

    # Perl module used by tools such as intltool:
    perlmod = 'XML::Parser'
    try:
        get_output(['perl', '-M%s' % perlmod, '-e', 'exit'])
    except Exception:
        uprint(_('Could not find the Perl module %s (usually part of package \'libxml-parser-perl\' or \'perl-XML-Parser\')') % perlmod)

    # check for a downloading util:
    if not (inpath('curl', os.environ['PATH'].split(os.pathsep)) or
            inpath('wget', os.environ['PATH'].split(os.pathsep))):
        uprint(_('curl or wget not found'))

    # check for git:
    if not inpath('git', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'git')
    else:
        try:
            git_help = os.popen('git --help', 'r').read()
            if not 'clone' in git_help:
                # a different 'git' binary (e.g. GNU Interactive Tools)
                uprint(_('Installed git program is not the right git'))
            else:
                if not check_version(['git', '--version'],
                                     r'git version ([\d.]+)', '1.5.6'):
                    uprint(_('%s not found') % 'git >= 1.5.6')
        except Exception:
            uprint(_('Could not check git program'))

    # check for flex/bison:
    if not inpath('flex', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'flex')
    if not inpath('bison', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'bison')
    if not inpath('xzcat', os.environ['PATH'].split(os.pathsep)):
        uprint(_('%s not found') % 'xzcat')
class ModuleSet:
    """A named collection of module objects plus the package database,
    with dependency-ordered module-list computation."""

    def __init__(self, config = None, db=None):
        # config: jhbuild Config object; db: optional pre-built package DB
        # (when None, one is opened — and migrated from its legacy path —
        # under config.top_builddir).
        self.config = config
        self.modules = {}
        if db is None:
            # NOTE(review): self.config is dereferenced here, so calling
            # ModuleSet() with config=None and db=None will fail — confirm
            # all callers pass a config in that case.
            legacy_pkgdb_path = os.path.join(self.config.prefix, 'share',
                                             'jhbuild', 'packagedb.xml')
            new_pkgdb_path = os.path.join(self.config.top_builddir,
                                          'packagedb.xml')
            if os.path.isfile(legacy_pkgdb_path):
                # one-time migration of the package db to its new home
                os.rename(legacy_pkgdb_path, new_pkgdb_path)
            self.packagedb = packagedb.PackageDB(new_pkgdb_path, config)
        else:
            self.packagedb = db

    def add(self, module):
        '''add a Module object to this set of modules'''
        self.modules[module.name] = module

    def get_module(self, module_name, ignore_case = False):
        """Look up a module by name; with ignore_case, fall back to a
        case-insensitive scan. Raises KeyError when not found."""
        # Fix: membership test instead of Python-2-only dict.has_key().
        if module_name in self.modules or not ignore_case:
            return self.modules[module_name]
        module_name_lower = module_name.lower()
        for module in self.modules.keys():
            if module.lower() == module_name_lower:
                logging.info(_('fixed case of module \'%(orig)s\' to '
                               '\'%(new)s\'') % {'orig': module_name,
                                                 'new': module})
                return self.modules[module]
        raise KeyError(module_name)

    def get_module_list(self, seed, skip=[], tags=[], ignore_cycles=False,
                        ignore_suggests=False, include_optional_modules=False,
                        ignore_missing=False, process_sysdeps=True):
        '''gets a list of module objects (in correct dependency order)
        needed to build the modules in the seed list'''
        if seed == 'all':
            seed = self.modules.keys()
        try:
            all_modules = [self.get_module(mod, ignore_case=True)
                           for mod in seed if mod not in skip]
        except KeyError as e:
            # Fix: 'except ... as' instead of Python-2-only comma syntax.
            raise UsageError(_('module "%s" not found') % e)

        asked_modules = all_modules[:]

        # 1st: get all modules that will be needed
        # note this is only needed to skip "after" modules that would not
        # otherwise be built
        i = 0
        while i < len(all_modules):
            dep_missing = False
            for modname in all_modules[i].dependencies:
                depmod = self.modules.get(modname)
                if not depmod:
                    if not ignore_missing:
                        raise UsageError(_(
                            '%(module)s has a dependency on unknown "%(invalid)s" module') % {
                                'module': all_modules[i].name,
                                'invalid': modname})
                    logging.info(_(
                        '%(module)s has a dependency on unknown "%(invalid)s" module') % {
                            'module': all_modules[i].name,
                            'invalid': modname})
                    dep_missing = True
                    continue
                if not depmod in all_modules:
                    all_modules.append(depmod)
            if not ignore_suggests:
                # suggests can be ignored if not in moduleset
                for modname in all_modules[i].suggests:
                    depmod = self.modules.get(modname)
                    if not depmod:
                        continue
                    if not depmod in all_modules:
                        all_modules.append(depmod)
            if dep_missing:
                # Fix (off-by-one): after deleting entry i, the next module
                # slides into slot i; the original unconditionally ran
                # 'i += 1' here and so never expanded that module's
                # dependencies. Re-examine slot i instead of advancing.
                del all_modules[i]
                continue
            i += 1

        # 2nd: order them, raise an exception on hard dependency cycle,
        # ignore them for soft dependencies
        self._ordered = []
        self._state = {}

        for modname in skip:
            # mark skipped modules as already processed
            self._state[self.modules.get(modname)] = 'processed'

        # process_sysdeps lets us avoid repeatedly checking system module
        # state when handling recursive dependencies.
        if self.config.partial_build and process_sysdeps:
            system_module_state = self.get_system_modules(all_modules)
            # Fix: items() instead of Python-2-only iteritems().
            for pkg_config, (module, req_version, installed_version,
                             new_enough) in system_module_state.items():
                # Only mark a module as processed if new enough *and* we
                # haven't built it before
                if new_enough and not self.packagedb.check(module.name):
                    self._state[module] = 'processed'

        if tags:
            for modname in self.modules:
                for tag in tags:
                    if tag in self.modules[modname].tags:
                        break
                else:
                    # no tag matched, mark module as processed
                    self._state[self.modules[modname]] = 'processed'

        def order(modules, module, mode = 'dependencies'):
            # Depth-first ordering; self._state tracks clean/in-progress/
            # processed, self._ordered accumulates the build order.
            if self._state.get(module, 'clean') == 'processed':
                # already seen
                return
            if self._state.get(module, 'clean') == 'in-progress':
                # dependency circle, abort when processing hard dependencies
                if not ignore_cycles:
                    raise DependencyCycleError()
                else:
                    self._state[module] = 'in-progress'
                    return
            self._state[module] = 'in-progress'
            for modname in module.dependencies:
                try:
                    depmod = self.modules[modname]
                    order([self.modules[x] for x in depmod.dependencies],
                          depmod, 'dependencies')
                except KeyError:
                    pass # user already notified via logging.info above
            if not ignore_suggests:
                for modname in module.suggests:
                    depmod = self.modules.get(modname)
                    if not depmod:
                        continue
                    save_state, save_ordered = (self._state.copy(),
                                                self._ordered[:])
                    try:
                        order([self.modules[x] for x in depmod.dependencies],
                              depmod, 'suggests')
                    except DependencyCycleError:
                        # soft dependency cycles are tolerated: roll back
                        self._state, self._ordered = save_state, save_ordered
                    except KeyError:
                        pass # user already notified via logging.info above
            extra_afters = []
            for modname in module.after:
                depmod = self.modules.get(modname)
                if not depmod:
                    # this module doesn't exist, skip.
                    continue
                if not depmod in all_modules and not include_optional_modules:
                    # skip modules that would not be built otherwise
                    # (build_optional_modules being the argument to force them
                    # to be included nevertheless)
                    if not depmod.dependencies:
                        # depmod itself has no dependencies, skip.
                        continue
                    # more expensive, if depmod has dependencies, compute its
                    # full list of hard dependencies, getting it into
                    # extra_afters, so they are also evaluated.
                    # <http://bugzilla.gnome.org/show_bug.cgi?id=546640>
                    t_ms = ModuleSet(self.config)
                    t_ms.modules = self.modules.copy()
                    dep_modules = t_ms.get_module_list(seed=[depmod.name],
                                                       process_sysdeps=False)
                    for m in dep_modules[:-1]:
                        if m in all_modules:
                            extra_afters.append(m)
                    continue
                save_state, save_ordered = self._state.copy(), self._ordered[:]
                try:
                    order([self.modules[x] for x in depmod.dependencies],
                          depmod, 'after')
                except DependencyCycleError:
                    self._state, self._ordered = save_state, save_ordered
            for depmod in extra_afters:
                save_state, save_ordered = self._state.copy(), self._ordered[:]
                try:
                    order([self.modules[x] for x in depmod.dependencies],
                          depmod, 'after')
                except DependencyCycleError:
                    self._state, self._ordered = save_state, save_ordered
            self._state[module] = 'processed'
            self._ordered.append(module)

        for i, module in enumerate(all_modules):
            order([], module)
            if i+1 == len(asked_modules):
                break

        ordered = self._ordered[:]
        del self._ordered
        del self._state
        return ordered