class PythonDependency(Dependency):
    def __init__(self, name):
        Dependency.__init__(self, name)
        self.__opt = LocalOptions(name)

    def set_cache(self, cache):
        cache.register(self.__opt)

    def add_options(self, parser):
        self.__opt.add_option(parser, '--with-{0}'.format(self.name),
                              type='string', dest='path', metavar='PATH',
                              help='use {0} includes/libraries from this path'.format(self.name))

    def apply(self, obj):
        return

    @property
    def _path(self):
        return getattr(self.__opt, 'path', None)

    def state_merge(self, value):
        self.__opt.state_merge({"path": value})

    @property
    def has_options(self):
        return True

    @property
    def is_satisfied(self):
        return self._path is not None
class CMakeCoverage(object):
    def __init__(self, env, optparser, cache):
        self.__env = env
        self.__root = env.getcwd()
        self.__exclude = []
        self.__opt = LocalOptions('coverage')
        cache.register(self.__opt)
        if self.__env.has('tools', 'gcovr'):
            self.__opt.state_merge({'gcovr_path': self.__env.get('tools', 'gcovr')})
        cover = OptionGroup(optparser, "CMake Coverage Options")
        self.__opt.add_bool_option(cover, '--coverage-xml', dest='xml',
                                   help='generate XML output', cache=False)
        self.__opt.add_bool_option(cover, '--coverage-keep', dest='keep',
                                   help='keep intermediate output', cache=False)
        self.__opt.add_option(cover, '--coverage-output', type='string', dest='output',
                              metavar='FILE', help='write output to this file', cache=False)
        self.__opt.add_option(cover, '--coverage-gcovr-path', type='string',
                              default='/usr/bin/gcovr', dest='gcovr_path', metavar='FILE',
                              help='location of gcovr script')
        optparser.add_option_group(cover)

    def __cleanup_gcov_mess(self):
        for root, dirs, files in os.walk(self.__env.getcwd()):
            for f in files:
                if f.endswith('.gcov'):
                    source = os.path.join(root, f)
                    target = f.replace('#', '/')
                    trel = target if self.__root is None else os.path.relpath(target, self.__root)
                    if self.__opt.keep and \
                       (self.__root is None or target.startswith(self.__root)) and \
                       not any(re.match(e, trel) for e in self.__exclude):
                        self.__env.move(source, target, False)
                    else:
                        self.__env.remove_files(source)

    def exclude(self, *args):
        self.__exclude += args

    def set_root(self, root):
        self.__root = root

    def run(self):
        options = []
        if self.__root is not None:
            options += ['-r', self.__root]
        if self.__exclude:
            for pattern in self.__exclude:
                options += ['-e', pattern]
        if self.__opt.output is not None:
            options += ['-o', self.__opt.output]
        if self.__opt.keep:
            options += ['--keep']
        if self.__opt.xml:
            options += ['--xml']
        self.__env.execute(self.__opt.gcovr_path, *options)
        self.__cleanup_gcov_mess()
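# Standalone sketch (not part of the original mirbuild source) of the file-name handling
# in CMakeCoverage.__cleanup_gcov_mess() above: gcov encodes path separators as '#' in the
# names of the .gcov files it writes, so replacing '#' with '/' recovers the original
# source path, which is then matched against the exclude patterns relative to the
# coverage root. The example file name and root below are hypothetical.
import os
import re

def gcov_keep_target(gcov_name, root, exclude_patterns):
    """Return the restored source path if the .gcov file should be kept, else None."""
    target = gcov_name.replace('#', '/')
    trel = target if root is None else os.path.relpath(target, root)
    if root is not None and not target.startswith(root):
        return None
    if any(re.match(e, trel) for e in exclude_patterns):
        return None
    return target

# gcov_keep_target('#home#user#proj#src#foo.cpp.gcov', '/home/user/proj', [r'test/'])
#   -> '/home/user/proj/src/foo.cpp.gcov'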
class Dependencies(object):
    __dependency_group_classes = set([DependencyGroup])

    def __init__(self, env, default_dependency_class):
        self.__env = env
        self.__default = default_dependency_class
        self.__groups = {}
        self.__opt = LocalOptions()

    @classmethod
    def register_group_class(cls, dependency_group_class):
        cls.__dependency_group_classes.add(dependency_group_class)

    def add(self, *args):
        for arg in args:
            if isinstance(arg, basestring):
                arg = self.__default(arg)
            assert isinstance(arg, Dependency)
            group = None
            for cls in self.__group_classes():
                if cls.can_manage(arg):
                    if cls not in self.__groups:
                        self.__groups[cls] = cls(self.__env)
                    group = self.__groups[cls]
                    break
            if group is not None:
                group.add(arg)
            else:
                raise RuntimeError('No class found that can manage ' + arg.__class__.__name__)

    def any_is_a(self, cls):
        return any(grp.any_is_a(cls) for grp in self.__groups.itervalues())

    def __group_classes(self):
        src = []
        for group in self.__dependency_group_classes:
            for managed in group.managed_classes:
                src.append((managed, group))
        dst = []
        while src:
            cls = src.pop(0)
            if any(issubclass(x[0], cls[0]) for x in src):
                src.append(cls)
            elif cls[1] not in dst:
                dst.append(cls[1])
        return dst

    def set_cache(self, cache):
        for grp in self.__groups.itervalues():
            try:
                grp.set_cache(cache)
            except Exception as ex:
                sys.stderr.write(str(ex) + '\n')

    def add_options(self, parser, nomerge=False):
        if self.__groups:
            if not nomerge and self.__env.has('dependencies', 'search_path'):
                self.__opt.state_merge({'deps_from': self.__env.get('dependencies', 'search_path')})
            og = OptionGroup(parser, "General Dependency Options")
            self.__opt.add_option(og, '--with-deps-from', type='string', dest='deps_from',
                                  metavar='PATH', help='scan this path for dependencies')
            parser.add_option_group(og)
            for cls in sorted(self.__groups, key=operator.attrgetter('__name__')):
                self.__groups[cls].add_options(parser, nomerge)

    def __autoresolve(self):
        if any(group.has_unsatisfied_dependencies for group in self.__groups.itervalues()):
            self.__env.say('Resolving dependencies...')
            path = self.__opt.deps_from
            fast = True
            if path.startswith('slow:'):
                path = path[5:]
                fast = False
            found = mirbuild.walk.Walker(path, fastscan=fast, env=self.__env).dependencies
            for group in self.__groups.itervalues():
                group.set_unsatisfied_dependencies(found)

    def apply(self, obj):
        if self.__groups and self.__opt.deps_from:
            self.__autoresolve()
        for grp in self.__groups.itervalues():
            grp.apply(obj)

    def get_dependency_group(self, cls):
        return self.__groups.get(cls)

    @property
    def meta(self):
        m = []
        for grp in self.__groups.itervalues():
            m += grp.meta
        return m
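# Standalone sketch (not part of the original mirbuild source) of the ordering performed
# by Dependencies.__group_classes() above: a (managed class, group class) pair is deferred
# while a more derived managed class is still queued, so groups that manage subclasses are
# tried before groups that manage their base classes. Base/Derived and the group labels
# below are made up for illustration.
class Base(object):
    pass

class Derived(Base):
    pass

def order_groups(pairs):
    """pairs: list of (managed_class, group); returns groups, most specific managed class first."""
    src = list(pairs)
    dst = []
    while src:
        managed, group = src.pop(0)
        if any(issubclass(other_managed, managed) for other_managed, _ in src):
            src.append((managed, group))  # something more specific is still queued; retry later
        elif group not in dst:
            dst.append(group)
    return dst

# order_groups([(Base, 'generic group'), (Derived, 'specific group')])
#   -> ['specific group', 'generic group']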
class CLibraryDependency(Dependency):
    def __init__(self, name):
        Dependency.__init__(self, name)
        self.__opt = LocalOptions(name)

    @staticmethod
    def isdir(basepath, env=None, *path):
        if basepath is not None:
            result = os.path.realpath(os.path.join(os.path.expanduser(basepath), *path))
            return os.path.isdir(result)
        else:
            raise ValueError

    @staticmethod
    def validated_path(basepath, env=None, *path):
        """
        Return the given path prefixed with the path given by the --with-... options.

        If an environment is given as keyword parameter 'env', the existence of the
        returned path is checked and a warning is output through the environment in
        case it does not exist.
        """
        if basepath is not None:
            result = os.path.realpath(os.path.join(os.path.expanduser(basepath), *path))
            if env is not None and not os.path.isdir(result):
                env.warn(result + ' not found.')
            return result
        else:
            raise ValueError

    def _validated_path(self, env=None, *path):
        return CLibraryDependency.validated_path(self._path, env, *path)

    def _isdir(self, env=None, *path):
        return CLibraryDependency.isdir(self._path, env, *path)

    def set_cache(self, cache):
        cache.register(self.__opt)

    def add_options(self, parser):
        self.__opt.add_option(parser, '--with-{0}'.format(self.name),
                              type='string', dest='path', metavar='PATH',
                              help='use {0} includes/libraries from this path'.format(self.name))

    def apply(self, obj):
        if self._path:
            # Adding includes is simple; in-source or out-of-source makes no difference.
            ipath = self._validated_path(obj.env, 'include')
            obj.add_include_path(ipath)
            obj.env.dbg('Added inc-path: {0}'.format(ipath))

            # Adding library paths is a little more complex, since the dependencies could
            # have been built either in-source or out-of-source and we don't really know.
            # We'll use a little bit of educated guesswork to try and figure it out!

            # This is the in-source library path.
            path = 'lib'
            path_exists = self._isdir(obj.env, path)

            # This is the out-of-source library path.
            oospath = os.path.join(obj.env.oosbuild_dir, path)
            oospath_exists = self._isdir(obj.env, oospath)

            # We now need to find the most suitable library path:
            #   - If the most suitable library path exists, use it.
            #   - If it doesn't exist but there's an alternative, use that.
            #   - If neither exists, fall back on the most suitable one (warning that it's not there).
            if obj.env.out_of_source:
                if oospath_exists or not path_exists:
                    path = oospath
            else:
                if not path_exists and oospath_exists:
                    path = oospath
            vpath = self._validated_path(obj.env, path)

            # Add the verified path we decided to use (with a little debug output for good measure).
            obj.add_library_path(vpath)
            obj.env.dbg('Added lib-path: {0}'.format(vpath))

    @property
    def _path(self):
        return getattr(self.__opt, 'path', None)

    def state_merge(self, value):
        self.__opt.state_merge({"path": value})

    @property
    def has_options(self):
        return True

    @property
    def is_satisfied(self):
        return self._path is not None
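# Standalone sketch (not part of the original mirbuild source) of the library-path
# decision implemented in CLibraryDependency.apply() above, written as a pure function so
# the three fallback rules are easy to see. 'oosbuild_dir' stands in for
# obj.env.oosbuild_dir and is assumed to be a build directory relative to the dependency
# root; the example path is hypothetical.
import os

def pick_library_path(basepath, oosbuild_dir, out_of_source):
    """Return the lib directory to use for a dependency rooted at 'basepath'."""
    inpath = os.path.join(basepath, 'lib')                  # in-source layout
    oospath = os.path.join(basepath, oosbuild_dir, 'lib')   # out-of-source layout
    preferred, fallback = (oospath, inpath) if out_of_source else (inpath, oospath)
    if os.path.isdir(preferred):
        return preferred
    if os.path.isdir(fallback):
        return fallback
    return preferred  # neither exists; the caller will warn about the preferred one

# Example: pick_library_path('/opt/libfoo', 'build', out_of_source=True)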
class Project(object):
    environment_class = mirbuild.environment.Environment
    test_runner_class = mirbuild.test.BoostTestRunner
    default_dependency_class = None
    nocache_commands = set('meta'.split())
    noapply_commands = set('meta clean realclean distclean'.split())

    def __init__(self, name, **opts):
        self.__configurecache = mirbuild.cache.Cache(filename='configure.json')
        self.__options = opts
        self.__tests = None
        self.__versions = []
        self.__plugins = []
        self.__install = []
        self.__packagers = {}
        self.__test_runners = {}
        try:
            self.__configurecache.load()
        except Exception:
            # if we can't load the cache, so be it
            pass
        self.__parser = OptionParser(add_help_option=False)
        self.__general_options = OptionGroup(self.__parser, "General Options")
        self.__parser.add_option_group(self.__general_options)
        self.opt = LocalOptions('general')
        self.__configurecache.register(self.opt)
        if self.has_build_configs:
            self.add_option('-c|--configuration', dest='configuration', type='string',
                            defaultstr=False, metavar='CFG',
                            help='selected build configuration')
        self.add_bool_option('-h|--help', dest='help',
                             help='show this help message and exit', cache=False)
        for opt, dest, help in [
                ('-d|--debug', 'debug', 'debug build.py execution'),
                ('-q|--quiet', 'quiet', 'be quiet'),
                ('-v|--verbose', 'verbose', 'verbose compiler/packaging output'),
                ('--trace', 'trace', 'trace build process (if supported by the builder)'),
                ('--nodeps', 'nodeps', "don't use dependencies from .mirbuildrc"),
                ('--noconfig', 'noconfig', "don't use .mirbuildrc files at all"),
                ('--noenv', 'noenv', "don't honour environment variables"),
                ('--called-by-packager', 'called_by_packager',
                 "option indicating that build.py is being invoked by a mirbuild packager"),
        ]:
            self.add_bool_option(opt, dest=dest, help=help, cache=False)
            for o in opt.split('|'):
                if o in sys.argv:
                    self.opt.set_value(dest, True)
        self.__env = self.environment_class(name)
        self.__env.set_options(self.opt)
        self._deps = mirbuild.dependency.Dependencies(self.__env, self.default_dependency_class)
        if not self.opt.noconfig:
            self.__env.read_config()
        if self.__env.has('build', 'prefix'):
            self.opt.state_merge({'prefix': self.__env.get('build', 'prefix')})
        self.opt.ensure_value('jobs', self.__env.get('build', 'parallel', 'auto'))
        self.add_option('-j|--jobs', dest='jobs', type='string', metavar='NUM', cache=False,
                        help='number of parallel jobs to execute if possible')
        self.add_option('--prefix', dest='prefix', type='string',
                        default=self.default_install_path, metavar='PATH',
                        help='install prefix for this project')
        self.add_option('--install-destdir', dest='install_destdir', type='string',
                        metavar='PATH', help='install files to this path')
        self.add_option('-b|--build-mode', dest='build_mode', type='choice',
                        choices=['in', 'out'], default='in', metavar="MODE",
                        help='[in|out] source build mode')
        for o in [('-I|--include-path', 'include', 'C_INCLUDE_PATH CPLUS_INCLUDE_PATH'.split()),
                  ('-L|--library-path', 'library', 'LIBRARY_PATH'.split())]:
            var = o[1] + '_path'
            if hasattr(self, 'add_' + var):
                path = []
                if not self.opt.noenv:
                    for e in o[2]:
                        path += [x for x in os.environ.get(e, '').split(os.path.pathsep) if x]
                path += [x for x in self.env.get('build', var, '').split(os.path.pathsep) if x]
                self.opt.state_merge({var: path})
                self.add_option(o[0], type='string', dest=var, multi=True, metavar='PATH',
                                help='use additional ' + o[1] + ' path')

    @property
    def _configure_cache(self):
        return self.__configurecache

    @property
    def _option_parser(self):
        return self.__parser

    @property
    def default_install_path(self):
        return '/usr/local'

    @property
    def ident(self):
        return self.__parser.get_prog_name()

    @property
    def options(self):
        return self.__options

    @property
    def env(self):
        return self.__env

    @property
    def commands(self):
        return self.methlist(r'run_(\w+)')

    @property
    def build_configurations(self):
        return self.methlist(r'configure_(\w+)')

    @property
    def tests(self):
        return self.__tests

    @property
    def packager(self):
        return self.__packagers[self.opt.packager]

    @property
    def project_name(self):
        return self.__env.project_name

    @property
    def build_config(self):
        return getattr(self.opt, 'configuration', None)

    @property
    def has_thrift_dependency(self):
        # We need to know if there's a thrift dependency, as we'll need to
        # configure some additional things for Visual Studio if we do.
        # [mhx] We could cache the result, but I'd rather not bother with that now...
        return self._deps.any_is_a(mirbuild.ThriftDependency)

    def prefixpath(self, path):
        if os.path.isabs(path):
            return path
        else:
            return os.path.join(self.opt.prefix, path)

    def installpath(self, path, isdir=False, mkdir=False):
        destdir = path if isdir else os.path.split(path)[0]
        if os.path.isabs(destdir):
            if self.opt.install_destdir is not None:
                destdir = os.path.join(self.opt.install_destdir, rootrelpath(destdir))
        else:
            if self.opt.install_destdir is not None:
                destdir = os.path.join(self.opt.install_destdir,
                                       rootrelpath(self.opt.prefix), destdir)
            else:
                destdir = os.path.join(self.opt.prefix, destdir)
        if mkdir:
            try:
                os.makedirs(destdir)
            except OSError as ex:
                if ex.errno != errno.EEXIST:
                    raise
        return destdir if isdir else os.path.join(destdir, os.path.split(path)[1])

    def __usage(self):
        usage = 'Usage: %prog [Options] <Command>'
        usage += '\n\nCommands: {0}'.format(', '.join(self.commands))
        if self.has_build_configs:
            usage += '\n\nBuild Configurations: {0}'.format(
                ', '.join(map(lambda x: (x + ' [*]') if x == self.__default_build_config() else x,
                              self.build_configurations)))
        return usage

    def __default_build_config(self):
        if not self.has_build_configs:
            return None
        if self.opt.configuration is not None:
            return self.opt.configuration
        try:
            return self.env.get('build', 'configuration')
        except Exception:
            pass
        if 'release' in self.build_configurations:
            return 'release'
        return self.build_configurations[0]

    def methlist(self, match):
        methods = []
        run = re.compile(match)
        for method in dir(self):
            m = run.match(method)
            if m is not None and getattr(self, method) is not None:
                methods.append(m.group(1))
        methods.sort()
        return methods

    def add_option(self, *args, **kw):
        self.opt.add_option(self.__general_options, *args, **kw)

    def add_bool_option(self, *args, **kw):
        self.opt.add_bool_option(self.__general_options, *args, **kw)

    def depends(self, *deps):
        self._deps.add(*deps)

    def test(self, *args, **kwargs):
        filt = kwargs.get('filter', lambda x: True)
        recurse = kwargs.get('recurse', True)
        runner_class = kwargs.get('runner', self.test_runner_class)
        test_builders = []
        if self.__tests is None:
            self.__tests = []
        for arg in args:
            if isinstance(arg, mirbuild.test.TestBuilder):
                test_builders.append(arg)
            elif isinstance(arg, basestring):
                dirs = []
                for e in glob.glob(arg):
                    if os.path.isdir(e):
                        if filt(e) and self.test_builder_class.looks_like_test_dir(e):
                            dirs.append(e)
                        if recurse:
                            for root, ds, fs in os.walk(e):
                                for d in ds:
                                    path = os.path.join(root, d)
                                    if filt(path) and self.test_builder_class.looks_like_test_dir(path):
                                        dirs.append(path)
                dirs.sort()
                for d in dirs:
                    test_builders.append(self.test_builder_class(self.env, d))
            else:
                test_builders.append(self.test_builder_class(self.env, *arg))
        if test_builders:
            if runner_class.name not in self.__test_runners:
                self.__test_runners[runner_class.name] = runner_class(self.env)
            runner = self.__test_runners[runner_class.name]
            for tb in test_builders:
                self.__tests.append(mirbuild.test.TestWrapper(builder=tb, runner=runner))

    def package(self, *args):
        for arg in args:
            assert isinstance(arg, mirbuild.packaging.Packaging)
            assert not self.__packagers.has_key(arg.name)
            self.__packagers[arg.name] = arg

    def install(self, source, destdir, glob=True):
        i = InstallRule()
        i.source = [source] if isinstance(source, basestring) else source
        i.destdir = destdir
        i.glob = glob
        self.__install.append(i)

    def __install_files(self):
        for i in self.__install:
            destdir = self.installpath(i.destdir, isdir=True, mkdir=True)
            source = []
            for src in i.source:
                if i.glob:
                    source += glob.glob(src)
                else:
                    source.append(src)
            for src in source:
                dst = os.path.join(destdir, os.path.split(src)[1])
                self.env.vsay('installing {0} -> {1}'.format(src, dst))
                if os.path.isdir(src):
                    shutil.copytree(src, dst, symlinks=True)
                else:
                    shutil.copy2(src, dst)

    def version(self, file=os.path.join('src', 'version.h'), info=None, **opts):
        if isinstance(file, basestring):
            file = mirbuild.version.VersionFileFactory.create(self.env, file, **opts)
        if info is None:
            info = mirbuild.version.VersionInfoFactory.create()
        assert isinstance(file, mirbuild.version.VersionFile)
        assert isinstance(info, mirbuild.version.VersionInfo)
        self.__versions.append({'file': file, 'info': info})

    def add_plugin(self, *args):
        for arg in args:
            assert isinstance(arg, mirbuild.plugin.Plugin)
            self.__plugins.append(arg)

    def _run_plugins(self, meth, reverse=False):
        for plugin in reversed(self.__plugins) if reverse else self.__plugins:
            self.env.dbg("running plugin method {0}.{1}".format(plugin.__class__.__name__, meth))
            getattr(plugin, meth)(self)

    @property
    def has_build_configs(self):
        return len(self.build_configurations) > 0

    def __expand_command(self, raw):
        if raw in self.commands:
            return raw
        cand = [cmd for cmd in self.commands if cmd.startswith(raw)]
        if len(cand) == 1:
            return cand[0]
        raise RuntimeError('{0} command "{1}".'.format('Invalid' if not cand else 'Ambiguous', raw))

    def run_has(self, what, arg):
        if what in ['command']:
            raise SystemExit(0 if arg in self.commands else 1)
        if what in ['config', 'configuration']:
            raise SystemExit(0 if arg in self.build_configurations else 1)
        raise SystemExit(2)

    def run(self):
        try:
            if self.__tests is None:
                self.test('test')
            if not self.__packagers:
                self.package(*mirbuild.packaging.PackagingFactory.create_all(self.env))
            dc = mirbuild.cache.Cache('dependencies')
            self._deps.set_cache(dc)
            self.__configurecache.register(dc)
            rc = mirbuild.cache.Cache('test_runners')
            for runner in self.__test_runners.itervalues():
                try:
                    runner.set_cache(rc)
                except Exception as ex:
                    sys.stderr.write(str(ex) + '\n')
            self.__configurecache.register(rc)
            self._deps.add_options(self.__parser, nomerge=self.opt.nodeps)
            for name, runner in self.__test_runners.iteritems():
                if self.env.has_section('test:' + name):
                    runner.state_merge(self.env.get_section('test:' + name))
                runner.add_options(self.__parser)
            if self.__packagers:
                self.add_option('--packager', dest='packager', type='choice',
                                choices=self.__packagers.keys(),
                                defaultstr=len(self.__packagers) == 1,
                                default=self.__packagers.keys()[0] if len(self.__packagers) == 1 else None,
                                metavar='PKG', help='selected packager')
                for name, pkg in self.__packagers.iteritems():
                    sec = 'packaging:' + name
                    if self.env.has_section(sec):
                        pkg.state_merge(self.env.get_section(sec))
                    pkg.add_options(self.__parser)
                self.run_package = self.do_package
            self.__parser.set_usage(self.__usage())
            args = self.__parser.parse_args()[1]
            if self.has_build_configs:
                self.opt.ensure_value('configuration', self.__default_build_config())
            if self.opt.help or len(args) < 1:
                self.__parser.print_help()
                raise SystemExit(0)
            if self.has_build_configs and self.build_config not in self.build_configurations:
                raise RuntimeError('Invalid build configuration "{0}".'.format(self.build_config))
            command = self.__expand_command(args[0])
            command_method = getattr(self, 'run_' + command)
            if command not in self.noapply_commands:
                self.__apply_paths()
                self._deps.apply(self)
            if command not in self.nocache_commands:
                self.__configurecache.save()
            self.env.vsay('''******************************
Config : {0}
Action : {1}
******************************'''.format(self.build_config if self.has_build_configs else '(none)', command))
            command_method(*args[1:])
        except RuntimeError as ex:
            if self.opt.debug:
                raise
            sys.stderr.write('*** ERROR: ' + str(ex) + '\n')
            raise SystemExit(1)
        except KeyboardInterrupt:
            if self.opt.debug:
                raise
            sys.stderr.write('*** INTERRUPTED\n')
            raise SystemExit(1)

    def run_meta(self):
        meta = {
            'project': self.project_name,
            'commands': self.commands,
            'dependencies': self._deps.meta,
        }
        if self.__packagers:
            meta['packaging'] = {}
            for name, p in self.__packagers.iteritems():
                meta['packaging'][name] = p.meta
        try:
            info = mirbuild.version.VersionInfoFactory.create()
            meta['version'] = info.upstream_version()
        except RuntimeError:
            pass
        print json.dumps(meta, indent=4)

    def run_build(self):
        self.run_configure()
        self._run_plugins('pre_build')
        self._run_plugins('build')
        self.do_build()
        self._run_plugins('post_build')

    def run_test(self):
        self.run_build()
        self._run_plugins('pre_test')
        self._run_plugins('test')
        self.do_test()
        self._run_plugins('post_test')

    def run_install(self):
        self.run_build()
        self._run_plugins('pre_install')
        self._run_plugins('install')
        self.do_install()
        self.__install_files()
        self._run_plugins('post_install')

    # TODO
    # def run_coverage(self):
    #     self.run_test()
    #     self.do_coverage()

    # this is just an alias
    def run_distclean(self):
        self.run_realclean()

    def run_realclean(self):
        for t in self.tests:
            t.clean()
        self._run_plugins('pre_realclean', reverse=True)
        self.do_realclean()
        self._run_plugins('realclean', reverse=True)
        self._run_plugins('post_realclean', reverse=True)
        self.env.remove_files(self.__configurecache.filename)
        self.env.remove_trees('build')
        for v in self.__versions:
            v['file'].clean()

    def run_clean(self):
        self._run_plugins('pre_clean', reverse=True)
        self.do_clean()
        self._run_plugins('clean', reverse=True)
        self._run_plugins('post_clean', reverse=True)
        for t in self.tests:
            t.clean()

    def __apply_paths(self):
        for opt in ['include_path', 'library_path']:
            meth = getattr(self, 'add_' + opt, None)
            if meth is not None:
                for path in getattr(self.opt, opt):
                    meth(mirbuild.dependency.CLibraryDependency.validated_path(path, env=self.env))

    def run_configure(self):
        for v in self.__versions:
            v['file'].generate(v['info'])
        self._run_plugins('pre_configure')
        self._run_plugins('configure')
        self.do_configure()
        self._run_plugins('post_configure')

    def do_test(self):
        for t in self.tests:
            t.configure()
            t.build()
        obs = mirbuild.test.TestObserver()
        for t in self.tests:
            t.run(obs)
        if obs.num_total > 0:
            self.env.say(obs.report())
            if obs.num_failed > 0:
                raise SystemExit(1)
        elif self.tests:
            raise RuntimeError('No test runs observed.')

    def do_package(self):
        self._run_plugins('pre_package')
        self.prepare_package()
        self._run_plugins('package')
        self.packager.package()
        self._run_plugins('post_package')

    def prepare_package(self):
        pass
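# Standalone sketch (not part of the original mirbuild source) of the command-prefix
# expansion used by Project.__expand_command() above: an exact command name wins, any
# unambiguous prefix is expanded, and ambiguous or unknown input raises. The command list
# in the usage comment is hypothetical.
def expand_command(raw, commands):
    if raw in commands:
        return raw
    cand = [cmd for cmd in commands if cmd.startswith(raw)]
    if len(cand) == 1:
        return cand[0]
    raise RuntimeError('{0} command "{1}".'.format('Invalid' if not cand else 'Ambiguous', raw))

# expand_command('real', ['build', 'clean', 'configure', 'realclean', 'test'])  -> 'realclean'
# expand_command('c',    ['build', 'clean', 'configure', 'realclean', 'test'])  -> RuntimeError (ambiguous)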
class DebianPackaging(mirbuild.packaging.Packaging):
    name = 'debian'

    def __init__(self, env, rules=None, compat=None, options=None):
        self.__opt = LocalOptions()
        self._env = env
        self.__pkg = {}
        self.__rules = DebianRules(verbose=env.verbose, debug=env.debug) if rules is None else rules
        self.__compat = DebianCompat() if compat is None else compat
        self.__options = DebianOptions() if options is None else options
        self.__generated = [self.__rules, self.__compat, self.__options]

    @property
    def meta(self):
        try:
            return DebianControl().meta
        except IOError:
            return None

    @property
    def rules(self):
        return self.__rules

    @property
    def compat(self):
        return self.__compat

    @property
    def options(self):
        return self.__options

    @staticmethod
    def can_package():
        return os.path.exists(DebianControl.control_file)

    def add_options(self, parser):
        deb = OptionGroup(parser, "Debian Packaging Options")
        self.__opt.add_option(deb, '--debian-pkg-suffix', type='string', dest='suffix',
                              metavar='STRING', help='append suffix to package version')
        self.__opt.add_option(deb, '--debian-pkg-changelog-message', type='string',
                              dest='changelog', metavar='STRING',
                              help='use this string as changelog message')
        self.__opt.add_option(deb, '--debian-pkg-buildpackage-args', type='string',
                              dest='buildpackage_args', metavar='STRING',
                              help='JSON encoded list of arguments to pass to dpkg-buildpackage',
                              default='["-tc"]')
        self.__opt.add_bool_option(deb, '--debian-pkg-keep-rules', dest='keep_rules',
                                   help='keep auto-generated debian/rules file')
        parser.add_option_group(deb)

    def get_package_info(self, package):
        return DebianControl().package_info(package)

    def __prepare_package(self):
        for pkg, val in self.__pkg.iteritems():
            args = {}
            for k in 'uid gid dir svc'.split():
                if val.has_key(k):
                    args[k] = val[k]
            if args:
                self.add_file_generator(DebianPostinst(pkg, **args))
                self.add_file_generator(DebianPostrm(pkg, **args))

    def package(self):
        self.__prepare_package()
        try:
            for f in self.__generated:
                self._env.dbg("creating " + f.filename)
                f.create()
                if self.__opt.keep_rules and isinstance(f, DebianRules):
                    f.keep()
            changelog_copy = ScopedFileCopy('debian/changelog', create=False)
            if self.__opt.suffix is not None:
                changelog_copy.create()
                msg = self.__opt.changelog if self.__opt.changelog is not None else 'Generated by mirbuild.'
                self._env.execute(self._env.tool('dch'), '-l', self.__opt.suffix, msg)
            if self.__opt.buildpackage_args == '':
                args = []
            else:
                try:
                    args = json.loads(self.__opt.buildpackage_args)
                except ValueError as ex:
                    raise RuntimeError('Cannot parse buildpackage arguments ("{0}"): {1}'.format(
                        self.__opt.buildpackage_args, ex))
            self._env.execute(self._env.tool('dpkg-buildpackage'), *args)
        finally:
            for f in self.__generated:
                self._env.dbg("removing " + f.filename)
                f.remove()

    def state_merge(self, value):
        self.__opt.state_merge(value)

    def add_file_generator(self, *file_generators):
        self.__generated += list(file_generators)

    def __append_pkg(self, package, key, value):
        if package is None:
            package = DebianControl().package
        if not self.__pkg.has_key(package):
            self.__pkg[package] = {}
        if not self.__pkg[package].has_key(key):
            self.__pkg[package][key] = []
        self.__pkg[package][key].append(value)

    def create_group(self, groupname, package=None):
        self.__append_pkg(package, 'gid', groupname)

    def create_user(self, username, group='users', home=None, desc=None, package=None, allow_login=False):
        ui = mirbuild.packaging.UserInfo()
        ui.name = username
        ui.group = group
        ui.home = home
        ui.desc = desc
        ui.allow_login = allow_login
        self.__append_pkg(package, 'uid', ui)

    def create_dir(self, dirname, user, group, mode=0755, package=None):
class BoostTestRunner(TestRunner):
    name = 'boost'

    def __init__(self, env):
        TestRunner.__init__(self, env)
        self.__opt = LocalOptions('boost')

    def add_options(self, parser):
        boost = OptionGroup(parser, "Boost Test Options")
        self.__opt.add_option(boost, '--boost-test-output-dir', type='string', dest='output_dir',
                              metavar='PATH', help='write test log/result files to this directory')
        self.__opt.add_option(boost, '--boost-test-log-format', type='string', dest='log_format',
                              metavar='FORMAT', help='log output format (HRF, XML)')
        self.__opt.add_option(boost, '--boost-test-log-sink', type='string', dest='log_sink',
                              metavar='FILE', help='stdout/stderr or file pattern for log files')
        self.__opt.add_option(boost, '--boost-test-log-level', type='string', dest='log_level',
                              metavar='LEVEL', help='test log level (e.g. all, warning, error, nothing)')
        self.__opt.add_bool_option(boost, '--boost-test-show-progress', dest='show_progress',
                                   help='display progress indicator during test run')
        parser.add_option_group(boost)

    def set_cache(self, cache):
        cache.register(self.__opt)

    def __output_file(self, basedir, template, name):
        path = re.sub(r'\{name\}', name, template)
        if os.path.dirname(path) == '' and self.__opt.output_dir:
            path = os.path.join(self.__opt.output_dir, path)
        if not os.path.isabs(path):
            path = os.path.normpath(os.path.join(basedir, path))
        return path

    def execute(self, dir, tests, observer):
        genopt = []
        if self.__opt.log_format:
            genopt.append('--log_format=' + self.__opt.log_format)
        if self.__opt.log_level:
            genopt.append('--log_level=' + self.__opt.log_level)
        if self.__opt.show_progress:
            genopt.append('--show_progress')
        dir = os.path.join(dir, self._env.bin_dir)
        scd = ScopedChdir(dir)
        for t in tests:
            assert isinstance(t, Test)
            opt = genopt[:]
            if self.__opt.log_sink:
                sink = self.__opt.log_sink
                if sink not in ('stdout', 'stderr'):
                    if self.__opt.show_progress:
                        raise RuntimeError('using --boost-test-show-progress corrupts output files')
                    sink = self.__output_file(scd.original_dir, sink, t.name)
                opt.append('--log_sink=' + sink)
            opt += t.args
            self._env.say('\n=== Running Test [ {0} ] ===\n'.format(t.name))
            t.start_timer()
            try:
                self._env.execute(os.path.realpath(t.test), *opt)
                t.set_passed()
            except RuntimeError:
                t.set_passed(False)
            self._env.dbg('Test {0} finished in {1:.2f} seconds.'.format(t.name, t.duration))
            observer.add_test(t)

    def state_merge(self, value):
        self.__opt.state_merge(value)
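# Standalone sketch (not part of the original mirbuild source) of how
# BoostTestRunner.__output_file() above expands the --boost-test-log-sink pattern:
# '{name}' is replaced with the test name, a bare file name is placed under
# --boost-test-output-dir, and relative paths are anchored at the directory the runner
# was started from. The paths in the usage comment are hypothetical.
import os
import re

def boost_log_sink(template, test_name, output_dir, basedir):
    path = re.sub(r'\{name\}', test_name, template)
    if os.path.dirname(path) == '' and output_dir:
        path = os.path.join(output_dir, path)
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(basedir, path))
    return path

# boost_log_sink('{name}.xml', 'parser', 'logs', '/home/build/proj')
#   -> '/home/build/proj/logs/parser.xml'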