def _init(self):
    CommonBackend._init(self)

    self._backend_files = {}
    self._cmd = MozbuildObject.from_environment()
    self._manifest_entries = OrderedDefaultDict(set)
    self._compile_env_gen_files = (
        '*.c',
        '*.cpp',
        '*.h',
        '*.inc',
        '*.py',
        '*.rs',
    )

    # These are 'group' dependencies - All rules that list these as an
    # output will be built before any rules that list this as an input.
    self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>'
    self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>'
    # The preprocessor including source-repo.h and buildid.h creates
    # dependencies that aren't specified by moz.build and cause errors
    # in Tup. Express these as a group dependency.
    self._early_generated_files = '$(MOZ_OBJ_ROOT)/<early-generated-files>'

    self._built_in_addons = set()
    self._built_in_addons_file = \
        'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'

    # application.ini.h is a special case since we need to process
    # the FINAL_TARGET_PP_FILES for application.ini before running
    # the GENERATED_FILES script, and tup doesn't handle the rules
    # out of order. Similarly, dependentlibs.list uses libxul as
    # an input, so must be written after the rule for libxul.
    self._delayed_files = ('application.ini.h', 'dependentlibs.list',
                           'dependentlibs.list.gtest')
def _init(self):
    CommonBackend._init(self)

    self._supported_dirs = (
        'services',
        'servo',
        'startupcache',
        'storage',
        'taskcluster',
        'testing',
        'third_party',
        'toolkit',
        'tools',
        'uriloader',
        'view',
        'widget',
        'xpcom',
        'xpfe',
    )
    self._backend_files = {}
    self._cmd = MozbuildObject.from_environment()
    self._manifest_entries = OrderedDefaultDict(set)
    self._compile_env_gen_files = (
        '*.c',
        '*.cpp',
        '*.h',
        '*.inc',
        '*.py',
        '*.rs',
    )

    # These are 'group' dependencies - All rules that list these as an
    # output will be built before any rules that list this as an input.
    self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>'
    self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>'
def test_defaults(self):
    test = OrderedDefaultDict(bool, {'foo': 1})

    self.assertEqual(test['foo'], 1)
    self.assertEqual(test['qux'], False)
    self.assertEqual(test.keys(), ['foo', 'qux'])
def _init(self):
    CommonBackend._init(self)

    self._backend_files = {}
    self._cmd = MozbuildObject.from_environment()
    self._manifest_entries = OrderedDefaultDict(set)

    # This is a 'group' dependency - All rules that list this as an output
    # will be built before any rules that list this as an input.
    self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>'
def _init(self):
    super(FasterMakeBackend, self)._init()

    self._seen_directories = set()
    self._defines = dict()

    self._jar_manifests = OrderedDict()

    self._manifest_entries = OrderedDefaultDict(list)

    self._install_manifests = OrderedDefaultDict(InstallManifest)
def test_simple(self):
    original = OrderedDict(foo=1, bar=2)

    test = OrderedDefaultDict(bool, original)

    self.assertEqual(original, test)
    self.assertEqual(test['foo'], 1)
    self.assertEqual(test.keys(), ['foo', 'bar'])
def _init(self):
    super(FasterMakeBackend, self)._init()

    self._manifest_entries = OrderedDefaultDict(set)
    self._install_manifests = OrderedDefaultDict(InstallManifest)
    self._dependencies = OrderedDefaultDict(list)
    self._has_xpidl = False
def _init(self):
    super(FasterMakeBackend, self)._init()

    self._manifest_entries = OrderedDefaultDict(set)
    self._install_manifests = OrderedDefaultDict(InstallManifest)
    self._dependencies = OrderedDefaultDict(list)
    self._l10n_dependencies = OrderedDefaultDict(list)
    self._has_xpidl = False
    self._generated_files_map = {}
    self._generated_files = []
def test_defaults(self):
    test = OrderedDefaultDict(bool, {"foo": 1})

    self.assertEqual(test["foo"], 1)
    self.assertEqual(test["qux"], False)
    self.assertEqual(list(test), ["foo", "qux"])
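# The tests above pin down the OrderedDefaultDict contract: defaultdict-style
# creation of missing values combined with OrderedDict iteration order. A
# minimal sketch consistent with that contract (the actual mozbuild.util
# implementation may differ in details):

from collections import OrderedDict


class OrderedDefaultDict(OrderedDict):
    """An OrderedDict that creates missing values via a factory, like
    collections.defaultdict, while preserving insertion order."""

    def __init__(self, default_factory, *args, **kwargs):
        OrderedDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Create, insert, and return the default value; the new key lands at
        # the end of the iteration order, as test_defaults expects.
        value = self[key] = self._default_factory()
        return value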
def _init(self):
    CommonBackend._init(self)

    self._backend_files = {}
    self._cmd = MozbuildObject.from_environment()
    self._manifest_entries = OrderedDefaultDict(set)

    # These are a hack to approximate things that are needed for the
    # compile phase.
    self._compile_env_files = (
        '*.api',
        '*.c',
        '*.cfg',
        '*.cpp',
        '*.h',
        '*.inc',
        '*.msg',
        '*.py',
        '*.rs',
    )

    # These are 'group' dependencies - All rules that list these as an
    # output will be built before any rules that list this as an input.
    self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>'
    self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>'
    self._rust_libs = '$(MOZ_OBJ_ROOT)/<rust-libs>'
    # The preprocessor including source-repo.h and buildid.h creates
    # dependencies that aren't specified by moz.build and cause errors
    # in Tup. Express these as a group dependency.
    self._early_generated_files = '$(MOZ_OBJ_ROOT)/<early-generated-files>'

    self._shlibs = '$(MOZ_OBJ_ROOT)/<shlibs>'
    self._gtests = '$(MOZ_OBJ_ROOT)/<gtest>'
    self._default_group = '$(MOZ_OBJ_ROOT)/<default>'

    # The two rust libraries in the tree share many prerequisites, so we
    # need to prune common dependencies and therefore build all rust from
    # the same Tupfile.
    self._rust_outputs = set()
    self._rust_backend_file = self._get_backend_file(
        'toolkit/library/rust')

    self._built_in_addons = set()
    self._built_in_addons_file = \
        'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'
def test_simple(self):
    original = OrderedDict(foo=1, bar=2)

    test = OrderedDefaultDict(bool, original)

    self.assertEqual(original, test)
    self.assertEqual(test["foo"], 1)
    self.assertEqual(list(test), ["foo", "bar"])
def __init__(self, all_tests, srcdir, test_defaults=None):
    self._tests_by_path = OrderedDefaultDict(list)
    self._tests_by_flavor = defaultdict(set)
    self._test_dirs = set()
    self._objdir = os.path.abspath(os.path.join(all_tests, os.pardir))
    self._wpt_loaded = False
    self._srcdir = srcdir

    with open(all_tests, 'rb') as fh:
        test_data = pickle.load(fh)
    defaults = None
    if test_defaults:
        with open(test_defaults, 'rb') as fh:
            defaults = pickle.load(fh)
    for path, tests in test_data.items():
        for metadata in tests:
            if defaults:
                defaults_manifests = [metadata['manifest']]

                ancestor_manifest = metadata.get('ancestor-manifest')
                if ancestor_manifest:
                    # The (ancestor manifest, included manifest) tuple
                    # contains the defaults of the included manifest, so
                    # use it instead of [metadata['manifest']].
                    defaults_manifests[0] = (ancestor_manifest,
                                             metadata['manifest'])
                    defaults_manifests.append(ancestor_manifest)

                for manifest in defaults_manifests:
                    manifest_defaults = defaults.get(manifest)
                    if manifest_defaults:
                        metadata = manifestparser.combine_fields(
                            manifest_defaults, metadata)
            self._tests_by_path[path].append(metadata)
            self._test_dirs.add(os.path.dirname(path))
            flavor = metadata.get('flavor')
            self._tests_by_flavor[flavor].add(path)
class TestMetadata(object):
    """Holds information about tests.

    This class provides an API to query tests active in the build
    configuration.
    """

    def __init__(self, all_tests, test_defaults=None):
        self._tests_by_path = OrderedDefaultDict(list)
        self._tests_by_flavor = defaultdict(set)
        self._test_dirs = set()

        with open(all_tests, 'rb') as fh:
            test_data = pickle.load(fh)
        defaults = None
        if test_defaults:
            with open(test_defaults, 'rb') as fh:
                defaults = pickle.load(fh)
        for path, tests in test_data.items():
            for metadata in tests:
                if defaults:
                    defaults_manifests = [metadata['manifest']]
                    ancestor_manifest = metadata.get('ancestor-manifest')
                    if ancestor_manifest:
                        defaults_manifests.append(ancestor_manifest)
                    for manifest in defaults_manifests:
                        manifest_defaults = defaults.get(manifest)
                        if manifest_defaults:
                            metadata = manifestparser.combine_fields(
                                manifest_defaults, metadata)
                self._tests_by_path[path].append(metadata)
                self._test_dirs.add(os.path.dirname(path))
                flavor = metadata.get('flavor')
                self._tests_by_flavor[flavor].add(path)

    def tests_with_flavor(self, flavor):
        """Obtain all tests having the specified flavor.

        This is a generator of dicts describing each test.
        """
        for path in sorted(self._tests_by_flavor.get(flavor, [])):
            yield self._tests_by_path[path]

    def resolve_tests(self, paths=None, flavor=None, subsuite=None,
                      under_path=None, tags=None):
        """Resolve tests from an identifier.

        This is a generator of dicts describing each test.

        ``paths`` can be an iterable of values to use to identify tests to
        run. If an entry is a known test file, tests associated with that
        file are returned (there may be multiple configurations for a single
        file). If an entry is a directory, or a prefix of a directory
        containing tests, all tests in that directory are returned. If the
        string appears in a known test file, that test file is considered.
        If the path contains a wildcard pattern, tests matching that pattern
        are returned.

        If ``under_path`` is a string, it will be used to filter out tests
        that aren't in the specified path prefix relative to topsrcdir or
        the test's installed dir.

        If ``flavor`` is a string, it will be used to filter returned tests
        to only be the flavor specified. A flavor is something like
        ``xpcshell``.

        If ``subsuite`` is a string, it will be used to filter returned
        tests to only be in the subsuite specified.

        If ``tags`` are specified, they will be used to filter returned
        tests to only those with a matching tag.
        """
        if tags:
            tags = set(tags)

        def fltr(tests):
            for test in tests:
                if flavor:
                    if flavor == 'devtools' and \
                            test.get('flavor') != 'browser-chrome':
                        continue
                    if flavor != 'devtools' and test.get('flavor') != flavor:
                        continue

                if subsuite and test.get('subsuite') != subsuite:
                    continue

                if tags and not (tags & set(test.get('tags', '').split())):
                    continue

                if under_path and \
                        not test['file_relpath'].startswith(under_path):
                    continue

                # Make a copy so modifications don't change the source.
                yield dict(test)

        paths = paths or []
        paths = [mozpath.normpath(p) for p in paths]
        if not paths:
            paths = [None]

        candidate_paths = set()

        for path in sorted(paths):
            if path is None:
                candidate_paths |= set(self._tests_by_path.keys())
                continue

            if '*' in path:
                candidate_paths |= {p for p in self._tests_by_path
                                    if mozpath.match(p, path)}
                continue

            # If the path is a directory, or the path is a prefix of a
            # directory containing tests, pull in all tests in that
            # directory.
            if (path in self._test_dirs or
                    any(p.startswith(path) for p in self._tests_by_path)):
                candidate_paths |= {p for p in self._tests_by_path
                                    if p.startswith(path)}
                continue

            # If it's a test file, add just that file.
            candidate_paths |= {p for p in self._tests_by_path if path in p}

        for p in sorted(candidate_paths):
            tests = self._tests_by_path[p]

            for test in fltr(tests):
                yield test
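# A hedged usage sketch for TestMetadata above. The pickle file name and the
# query path are illustrative placeholders, not values the source guarantees;
# resolve_tests() is the documented query entry point.
metadata = TestMetadata('all-tests.pkl')
for test in metadata.resolve_tests(paths=['dom/tests'], flavor='xpcshell'):
    # Each result is an independent dict copy describing one test
    # configuration.
    print(test['file_relpath'])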
class FasterMakeBackend(CommonBackend):
    def _init(self):
        super(FasterMakeBackend, self)._init()

        self._seen_directories = set()
        self._defines = dict()

        self._manifest_entries = OrderedDefaultDict(list)

        self._install_manifests = OrderedDefaultDict(OverwriteInstallManifest)

        self._dependencies = OrderedDefaultDict(list)

    def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
        if target is None:
            target = mozpath.basename(path)
        # This matches what PP_TARGETS do in config/rules.
        if target.endswith('.in'):
            target = target[:-3]
        depfile = mozpath.join(
            self.environment.topobjdir, 'faster', '.deps',
            mozpath.join(obj.install_target, dest, target).replace('/', '_'))
        self._install_manifests[obj.install_target].add_preprocess(
            mozpath.join(obj.srcdir, path),
            mozpath.join(dest, target),
            depfile,
            **kwargs)

    def consume_object(self, obj):
        if not isinstance(obj, Defines) and isinstance(obj, ContextDerived):
            defines = self._defines.get(obj.objdir, {})
            if defines:
                defines = defines.defines

        if isinstance(obj, Defines):
            self._defines[obj.objdir] = obj

            # We're assuming below that Defines come first for a given
            # objdir, which is kind of set in stone from the order things
            # are treated in emitter.py.
            assert obj.objdir not in self._seen_directories

        elif isinstance(obj, JARManifest) and \
                obj.install_target.startswith('dist/bin'):
            self._consume_jar_manifest(obj, defines)

        elif isinstance(obj, VariablePassthru) and \
                obj.install_target.startswith('dist/bin'):
            for f in obj.variables.get('EXTRA_COMPONENTS', {}):
                path = mozpath.join(obj.install_target, 'components',
                                    mozpath.basename(f))
                self._install_manifests[obj.install_target].add_symlink(
                    mozpath.join(obj.srcdir, f),
                    mozpath.join('components', mozpath.basename(f))
                )
                if f.endswith('.manifest'):
                    manifest = mozpath.join(obj.install_target,
                                            'chrome.manifest')
                    self._manifest_entries[manifest].append(
                        'manifest components/%s' % mozpath.basename(f))

            for f in obj.variables.get('EXTRA_PP_COMPONENTS', {}):
                self._add_preprocess(obj, f, 'components', defines=defines)

                if f.endswith('.manifest'):
                    manifest = mozpath.join(obj.install_target,
                                            'chrome.manifest')
                    self._manifest_entries[manifest].append(
                        'manifest components/%s' % mozpath.basename(f))

        elif isinstance(obj, JavaScriptModules) and \
                obj.install_target.startswith('dist/bin'):
            for path, strings in obj.modules.walk():
                base = mozpath.join('modules', path)
                for f in strings:
                    if obj.flavor == 'extra':
                        self._install_manifests[obj.install_target] \
                            .add_symlink(
                                mozpath.join(obj.srcdir, f),
                                mozpath.join(base, mozpath.basename(f))
                            )
                    elif obj.flavor == 'extra_pp':
                        self._add_preprocess(obj, f, base, defines=defines)

        elif isinstance(obj, JsPreferenceFile) and \
                obj.install_target.startswith('dist/bin'):
            # The condition for the directory value in config/rules.mk is:
            # ifneq (,$(DIST_SUBDIR)$(XPI_NAME)$(LIBXUL_SDK))
            # - LIBXUL_SDK is not supported (it likely doesn't work in the
            #   recursive backend anyways)
            # - when XPI_NAME is set, obj.install_target will start with
            #   dist/xpi-stage
            # - when DIST_SUBDIR is set, obj.install_target will start with
            #   dist/bin/$(DIST_SUBDIR)
            # So an equivalent condition that is not cumbersome for us and
            # that is enough at least for now is checking if
            # obj.install_target is different from dist/bin.
            if obj.install_target == 'dist/bin':
                pref_dir = 'defaults/pref'
            else:
                pref_dir = 'defaults/preferences'

            dest = mozpath.join(obj.install_target, pref_dir,
                                mozpath.basename(obj.path))
            # We preprocess these, but they don't necessarily have
            # preprocessor directives, so tell the preprocessor to not
            # complain about that.
            self._add_preprocess(obj, obj.path, pref_dir, defines=defines,
                                 silence_missing_directive_warnings=True)

        elif isinstance(obj, Resources) and \
                obj.install_target.startswith('dist/bin'):
            for path, strings in obj.resources.walk():
                base = mozpath.join('res', path)
                for f in strings:
                    flags = strings.flags_for(f)
                    if flags and flags.preprocess:
                        self._add_preprocess(obj, f, base, marker='%',
                                             defines=obj.defines)
                    else:
                        self._install_manifests[obj.install_target] \
                            .add_symlink(
                                mozpath.join(obj.srcdir, f),
                                mozpath.join(base, mozpath.basename(f))
                            )

        elif isinstance(obj, FinalTargetFiles) and \
                obj.install_target.startswith('dist/bin'):
            for path, strings in obj.files.walk():
                base = mozpath.join(obj.install_target, path)
                for f in strings:
                    self._install_manifests[obj.install_target].add_symlink(
                        mozpath.join(obj.srcdir, f),
                        mozpath.join(path, mozpath.basename(f))
                    )

        elif isinstance(obj, DistFiles) and \
                obj.install_target.startswith('dist/bin'):
            # We preprocess these, but they don't necessarily have
            # preprocessor directives, so tell the preprocessor to not
            # complain about that.
            for f in obj.files:
                self._add_preprocess(obj, f, '', defines=defines,
                                     silence_missing_directive_warnings=True)

        else:
            # We currently ignore a lot of object types, so just acknowledge
            # everything.
            return True

        self._seen_directories.add(obj.objdir)
        return True

    def _consume_jar_manifest(self, obj, defines):
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - L10n support
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        pp.context.update(defines)
        pp.context.update(self.environment.defines)
        pp.context.update(
            AB_CD='en-US',
            BUILD_FASTER=1,
        )
        pp.out = JarManifestParser()
        pp.do_include(obj.path)

        for jarinfo in pp.out:
            install_target = obj.install_target

            # Bug 1150417 added some gross hacks, which we don't try to
            # support generically. Fortunately, the hacks don't define more
            # than chrome manifest entries, so just assume we don't get
            # any installation entries.
            if jarinfo.name.startswith('../'):
                assert not jarinfo.entries

            base = mozpath.join('chrome', jarinfo.name)

            for e in jarinfo.entries:
                if e.is_locale:
                    src = mozpath.join(
                        jarinfo.relativesrcdir or mozpath.dirname(obj.path),
                        'en-US',
                        e.source)
                elif e.source.startswith('/'):
                    src = mozpath.join(self.environment.topsrcdir,
                                       e.source[1:])
                else:
                    src = mozpath.join(mozpath.dirname(obj.path), e.source)

                if '*' in e.source:
                    if e.preprocess:
                        raise Exception('%s: Wildcards are not supported '
                                        'with preprocessing' % obj.path)

                    def _prefix(s):
                        for p in s.split('/'):
                            if '*' not in p:
                                yield p + '/'
                    prefix = ''.join(_prefix(src))

                    self._install_manifests[obj.install_target] \
                        .add_pattern_symlink(
                            prefix,
                            src[len(prefix):],
                            mozpath.join(base, e.output))
                    continue

                if not os.path.exists(src):
                    if e.is_locale:
                        raise Exception('%s: Cannot find %s'
                                        % (obj.path, e.source))
                    if e.source.startswith('/'):
                        src = mozpath.join(self.environment.topobjdir,
                                           e.source[1:])
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = mozpath.join(obj.objdir, e.source)
                    self._dependencies['install-%s' % obj.install_target] \
                        .append(mozpath.relpath(
                            src, self.environment.topobjdir))

                if e.preprocess:
                    kwargs = {}
                    if src.endswith('.css'):
                        kwargs['marker'] = '%'
                    self._add_preprocess(
                        obj,
                        src,
                        mozpath.join(base, mozpath.dirname(e.output)),
                        mozpath.basename(e.output),
                        defines=defines,
                        **kwargs)
                else:
                    self._install_manifests[obj.install_target].add_symlink(
                        src,
                        mozpath.join(base, e.output))

            manifest = mozpath.normpath(mozpath.join(obj.install_target,
                                                     base))
            manifest += '.manifest'
            for m in jarinfo.chrome_manifests:
                self._manifest_entries[manifest].append(
                    m.replace('%', jarinfo.name + '/'))

            # ../ special cased for bug 1150417 again.
            if not jarinfo.name.startswith('../'):
                manifest = mozpath.normpath(mozpath.join(
                    obj.install_target, 'chrome.manifest'))
                entry = 'manifest %s.manifest' % base
                if entry not in self._manifest_entries[manifest]:
                    self._manifest_entries[manifest].append(entry)

    def consume_finished(self):
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_BUILD_APP',
            'MOZ_WIDGET_TOOLKIT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            target = '$(TOPOBJDIR)/%s' % target
            mk.create_rule([target]).add_dependencies(
                ['content = %s' % ' '.join('"%s"' % e for e in entries)])

        mk.add_statement('MANIFEST_TARGETS = %s'
                         % ' '.join(manifest_targets))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we inferred:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s'
                                 % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
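# A hedged sketch of how a backend like FasterMakeBackend is driven, inferred
# only from the consume_object()/consume_finished() API above; the real
# driver lives elsewhere in mozbuild, and 'env' / 'emitted_objects' are
# placeholders for a configuration environment and the emitter's output.
backend = FasterMakeBackend(env)
for obj in emitted_objects:
    backend.consume_object(obj)  # accumulate manifests and dependencies
backend.consume_finished()       # write faster/Makefile + install manifests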
class TreeMetadataEmitter(LoggingMixin):
    """Converts the executed mozbuild files into data structures.

    This is a bridge between reader.py and data.py. It takes what was read
    by reader.BuildReader and converts it into the classes defined in the
    data module.
    """

    def __init__(self, config):
        self.populate_logger()

        self.config = config

        mozinfo.find_and_update_from_json(config.topobjdir)

        # Python 2.6 doesn't allow unicode keys to be used for keyword
        # arguments. This gross hack works around the problem until we
        # rid ourselves of 2.6.
        self.info = {}
        for k, v in mozinfo.info.items():
            if isinstance(k, unicode):
                k = k.encode('ascii')
            self.info[k] = v

        self._libs = OrderedDefaultDict(list)
        self._binaries = OrderedDict()
        self._linkage = []
        self._static_linking_shared = set()

        # Keep track of external paths (third party build systems), starting
        # from what we run a subconfigure in. We'll eliminate some
        # directories as we traverse them with moz.build (e.g. js/src).
        subconfigures = os.path.join(self.config.topobjdir, 'subconfigures')
        paths = []
        if os.path.exists(subconfigures):
            paths = open(subconfigures).read().splitlines()
        self._external_paths = set(mozpath.normsep(d) for d in paths)
        # Add security/nss manually, since it doesn't have a subconfigure.
        self._external_paths.add('security/nss')

    def emit(self, output):
        """Convert the BuildReader output into data structures.

        The return value from BuildReader.read_topsrcdir() (a generator) is
        typically fed into this function.
        """
        file_count = 0
        sandbox_execution_time = 0.0
        emitter_time = 0.0
        sandboxes = {}

        def emit_objs(objs):
            for o in objs:
                yield o
                if not o._ack:
                    raise Exception('Unhandled object of type %s' % type(o))

        for out in output:
            if isinstance(out, (MozbuildSandbox, GypSandbox)):
                # Keep all sandboxes around, we will need them later.
                sandboxes[out['OBJDIR']] = out

                start = time.time()
                # We need to expand the generator for the timings to work.
                objs = list(self.emit_from_sandbox(out))
                emitter_time += time.time() - start

                for o in emit_objs(objs):
                    yield o

                # Update the stats.
                file_count += len(out.all_paths)
                sandbox_execution_time += out.execution_time

            else:
                raise Exception('Unhandled output type: %s' % type(out))

        start = time.time()
        objs = list(self._emit_libs_derived(sandboxes))
        emitter_time += time.time() - start

        for o in emit_objs(objs):
            yield o

        yield ReaderSummary(file_count, sandbox_execution_time, emitter_time)

    def _emit_libs_derived(self, sandboxes):
        # First do FINAL_LIBRARY linkage.
        for lib in (l for libs in self._libs.values() for l in libs):
            if not isinstance(lib, StaticLibrary) or not lib.link_into:
                continue
            if lib.link_into not in self._libs:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
                    % lib.link_into, sandboxes[lib.objdir])
            candidates = self._libs[lib.link_into]

            # When there are multiple candidates, but all are in the same
            # directory and have a different type, we want all of them to
            # have the library linked. The typical use case is when building
            # both a static and a shared library in a directory, and having
            # that as a FINAL_LIBRARY.
            if len(set(type(l) for l in candidates)) == len(candidates) and \
                    len(set(l.objdir for l in candidates)) == 1:
                for c in candidates:
                    c.link_library(lib)
            else:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
                    'multiple places:\n    %s' % (lib.link_into,
                    '\n    '.join(l.objdir for l in candidates)),
                    sandboxes[lib.objdir])

        # Next, USE_LIBS linkage.
        for sandbox, obj, variable in self._linkage:
            self._link_libraries(sandbox, obj, variable)

        def recurse_refs(lib):
            for o in lib.refs:
                yield o
                if isinstance(o, StaticLibrary):
                    for q in recurse_refs(o):
                        yield q

        # Check that all static libraries referring shared libraries in
        # USE_LIBS are linked into a shared library or program.
        for lib in self._static_linking_shared:
            if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
                shared_libs = sorted(l.basename for l in lib.linked_libraries
                                     if isinstance(l, SharedLibrary))
                raise SandboxValidationError(
                    'The static "%s" library is not used in a shared library '
                    'or a program, but USE_LIBS contains the following '
                    'shared library names:\n    %s\n\nMaybe you can remove '
                    'the static "%s" library?' % (lib.basename,
                    '\n    '.join(shared_libs), lib.basename),
                    sandboxes[lib.objdir])

        def recurse_libs(lib):
            for obj in lib.linked_libraries:
                if not isinstance(obj, StaticLibrary) or not obj.link_into:
                    continue
                yield obj.objdir
                for q in recurse_libs(obj):
                    yield q

        sent_passthru = set()
        for lib in (l for libs in self._libs.values() for l in libs):
            # For all root libraries (i.e. libraries that don't have a
            # FINAL_LIBRARY), record, for each static library it links
            # (recursively), that its FINAL_LIBRARY is that root library.
            if isinstance(lib, Library):
                if isinstance(lib, SharedLibrary) or not lib.link_into:
                    for p in recurse_libs(lib):
                        if p in sent_passthru:
                            continue
                        sent_passthru.add(p)
                        passthru = VariablePassthru(sandboxes[p])
                        passthru.variables['FINAL_LIBRARY'] = lib.basename
                        yield passthru
            yield lib

        for obj in self._binaries.values():
            yield obj

    LIBRARY_NAME_VAR = {
        'host': 'HOST_LIBRARY_NAME',
        'target': 'LIBRARY_NAME',
    }

    def _link_libraries(self, sandbox, obj, variable):
        """Add linkage declarations to a given object."""
        assert isinstance(obj, Linkable)

        extra = []
        # Add stdc++compat library when wanted and needed
        compat_varname = 'MOZ_LIBSTDCXX_%s_VERSION' % obj.KIND.upper()
        if sandbox.config.substs.get(compat_varname) \
                and not isinstance(obj, (StaticLibrary, HostLibrary)):
            extra.append({
                'target': 'stdc++compat',
                'host': 'host_stdc++compat',
            }[obj.KIND])

        for path in sandbox.get(variable, []) + extra:
            force_static = path.startswith('static:') and obj.KIND == 'target'
            if force_static:
                path = path[7:]
            name = mozpath.basename(path)
            dir = mozpath.dirname(path)
            candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
            if dir:
                if dir.startswith('/'):
                    dir = mozpath.normpath(
                        mozpath.join(obj.topobjdir, dir[1:]))
                else:
                    dir = mozpath.normpath(
                        mozpath.join(obj.objdir, dir))
                dir = mozpath.relpath(dir, obj.topobjdir)
                candidates = [l for l in candidates if l.relobjdir == dir]
                if not candidates:
                    # If the given directory is under one of the external
                    # (third party) paths, use a fake library reference to
                    # there.
                    for d in self._external_paths:
                        if dir.startswith('%s/' % d):
                            candidates = [self._get_external_library(
                                dir, name, force_static)]
                            break

                if not candidates:
                    raise SandboxValidationError(
                        '%s contains "%s", but there is no "%s" %s in %s.'
                        % (variable, path, name,
                        self.LIBRARY_NAME_VAR[obj.KIND], dir), sandbox)

            if len(candidates) > 1:
                # If there's more than one remaining candidate, it could be
                # that there are instances for the same library, in static
                # and shared form.
                libs = {}
                for l in candidates:
                    key = mozpath.join(l.relobjdir, l.basename)
                    if force_static:
                        if isinstance(l, StaticLibrary):
                            libs[key] = l
                    else:
                        if key in libs and isinstance(l, SharedLibrary):
                            libs[key] = l
                        if key not in libs:
                            libs[key] = l
                candidates = libs.values()
                if force_static and not candidates:
                    if dir:
                        raise SandboxValidationError(
                            '%s contains "static:%s", but there is no '
                            'static "%s" %s in %s.' % (variable, path, name,
                            self.LIBRARY_NAME_VAR[obj.KIND], dir), sandbox)
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is no static '
                        '"%s" %s in the tree' % (variable, name, name,
                        self.LIBRARY_NAME_VAR[obj.KIND]), sandbox)

            if not candidates:
                raise SandboxValidationError(
                    '%s contains "%s", which does not match any %s in the '
                    'tree.' % (variable, path,
                    self.LIBRARY_NAME_VAR[obj.KIND]), sandbox)

            elif len(candidates) > 1:
                paths = (mozpath.join(l.relativedir, 'moz.build')
                         for l in candidates)
                raise SandboxValidationError(
                    '%s contains "%s", which matches a %s defined in '
                    'multiple places:\n    %s' % (variable, path,
                    self.LIBRARY_NAME_VAR[obj.KIND],
                    '\n    '.join(paths)), sandbox)

            elif force_static and not isinstance(candidates[0],
                                                 StaticLibrary):
                raise SandboxValidationError(
                    '%s contains "static:%s", but there is only a shared '
                    '"%s" in %s. You may want to add FORCE_STATIC_LIB=True '
                    'in %s/moz.build, or remove "static:".' % (variable,
                    path, name, candidates[0].relobjdir,
                    candidates[0].relobjdir), sandbox)

            elif isinstance(obj, StaticLibrary) and \
                    isinstance(candidates[0], SharedLibrary):
                self._static_linking_shared.add(obj)
            obj.link_library(candidates[0])

        # Link system libraries from OS_LIBS/HOST_OS_LIBS.
        for lib in sandbox.get(variable.replace('USE', 'OS'), []):
            obj.link_system_library(lib)

    @memoize
    def _get_external_library(self, dir, name, force_static):
        # Create ExternalStaticLibrary or ExternalSharedLibrary object with
        # a mock sandbox more or less truthful about where the external
        # library is.
        sandbox = GlobalNamespace()
        sandbox.config = self.config
        sandbox.main_path = dir
        sandbox.all_paths = set([dir])
        with sandbox.allow_all_writes() as s:
            s['TOPSRCDIR'] = self.config.topsrcdir
            s['TOPOBJDIR'] = self.config.topobjdir
            s['RELATIVEDIR'] = dir
            s['SRCDIR'] = mozpath.join(self.config.topsrcdir, dir)
            s['OBJDIR'] = mozpath.join(self.config.topobjdir, dir)

        if force_static:
            return ExternalStaticLibrary(sandbox, name)
        else:
            return ExternalSharedLibrary(sandbox, name)

    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived
        instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, sandbox,
                                            path)

        for path in sandbox['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, sandbox,
                                            path)

        # XPIDL source files get processed and turned into .h and .xpt
        # files. If there are multiple XPIDL files in a directory, they get
        # linked together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox['XPIDL_MODULE']

        if sandbox['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                                         'XPIDL_SOURCES is defined.',
                                         sandbox)

        if xpidl_module and not sandbox['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                                         'unless there are XPIDL_SOURCES',
                                         sandbox)

        if sandbox['XPIDL_SOURCES'] and sandbox['NO_DIST_INSTALL']:
            self.log(logging.WARN, 'mozbuild_warning', dict(
                path=sandbox.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in sandbox['XPIDL_SOURCES']:
            yield XPIDLFile(sandbox, mozpath.join(sandbox['SRCDIR'], idl),
                            xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (sandbox[symbol] or []):
                if not os.path.exists(mozpath.join(sandbox['SRCDIR'], src)):
                    raise SandboxValidationError('Reference to a file that '
                        'doesn\'t exist in %s (%s)' % (symbol, src), sandbox)

        # Proxy some variables as-is until we have richer classes to
        # represent them. We should aim to keep this set small because it
        # violates the desired abstraction of the build definition away
        # from makefiles.
        passthru = VariablePassthru(sandbox)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'DISABLE_STL_WRAPPING',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_DSO_LDOPTS',
            'EXTRA_PP_COMPONENTS',
            'FAIL_ON_WARNINGS',
            'FILES_PER_UNIFIED_FILE',
            'USE_STATIC_LIBS',
            'GENERATED_FILES',
            'IS_GYP_DIR',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'PYTHON_UNIT_TESTS',
            'RCFILE',
            'RESFILE',
            'RCINCLUDE',
            'DEFFILE',
            'WIN32_EXE_LDFLAGS',
            'LD_VERSION_SCRIPT',
        ]
        for v in varlist:
            if v in sandbox and sandbox[v]:
                passthru.variables[v] = sandbox[v]

        for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']:
            if v in sandbox and sandbox[v]:
                passthru.variables['MOZBUILD_' + v] = sandbox[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        if sandbox['DELAYLOAD_DLLS']:
            passthru.variables['DELAYLOAD_LDFLAGS'] = [
                ('-DELAYLOAD:%s' % dll) for dll in sandbox['DELAYLOAD_DLLS']]
            passthru.variables['USE_DELAYIMP'] = True

        varmap = dict(
            SOURCES={
                '.s': 'ASFILES',
                '.asm': 'ASFILES',
                '.c': 'CSRCS',
                '.m': 'CMSRCS',
                '.mm': 'CMMSRCS',
                '.cc': 'CPPSRCS',
                '.cpp': 'CPPSRCS',
                '.cxx': 'CPPSRCS',
                '.S': 'SSRCS',
            },
            HOST_SOURCES={
                '.c': 'HOST_CSRCS',
                '.mm': 'HOST_CMMSRCS',
                '.cc': 'HOST_CPPSRCS',
                '.cpp': 'HOST_CPPSRCS',
                '.cxx': 'HOST_CPPSRCS',
            },
            UNIFIED_SOURCES={
                '.c': 'UNIFIED_CSRCS',
                '.mm': 'UNIFIED_CMMSRCS',
                '.cc': 'UNIFIED_CPPSRCS',
                '.cpp': 'UNIFIED_CPPSRCS',
                '.cxx': 'UNIFIED_CPPSRCS',
            }
        )
        varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
                           if k in ('SOURCES', 'UNIFIED_SOURCES')))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, sandbox)
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

        no_pgo = sandbox.get('NO_PGO')
        sources = sandbox.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
                                             'cannot be set at the same '
                                             'time', sandbox)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            ext = mozpath.splitext(f)[1]
            yield PerSourceFlag(sandbox, f, sources[f].flags)

        exports = sandbox.get('EXPORTS')
        if exports:
            yield Exports(sandbox, exports,
                          dist_install=not sandbox.get('NO_DIST_INSTALL',
                                                       False))

        defines = sandbox.get('DEFINES')
        if defines:
            yield Defines(sandbox, defines)

        resources = sandbox.get('RESOURCE_FILES')
        if resources:
            yield Resources(sandbox, resources, defines)

        for kind, cls in [('PROGRAM', Program),
                          ('HOST_PROGRAM', HostProgram)]:
            program = sandbox.get(kind)
            if program:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" as %s name, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), sandbox)
                self._binaries[program] = cls(sandbox, program)
                self._linkage.append((sandbox, self._binaries[program],
                                      kind.replace('PROGRAM', 'USE_LIBS')))

        for kind, cls in [
                ('SIMPLE_PROGRAMS', SimpleProgram),
                ('CPP_UNIT_TESTS', SimpleProgram),
                ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
            for program in sandbox[kind]:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), sandbox)
                self._binaries[program] = cls(sandbox, program,
                    is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append((sandbox, self._binaries[program],
                    'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
                    else 'USE_LIBS'))

        extra_js_modules = sandbox.get('EXTRA_JS_MODULES')
        if extra_js_modules:
            yield JavaScriptModules(sandbox, extra_js_modules, 'extra')

        extra_pp_js_modules = sandbox.get('EXTRA_PP_JS_MODULES')
        if extra_pp_js_modules:
            yield JavaScriptModules(sandbox, extra_pp_js_modules, 'extra_pp')

        test_js_modules = sandbox.get('TESTING_JS_MODULES')
        if test_js_modules:
            yield JavaScriptModules(sandbox, test_js_modules, 'testing')

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('LOCAL_INCLUDES', LocalInclude),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get('FINAL_TARGET') or sandbox.get('XPI_NAME') or \
                sandbox.get('DIST_SUBDIR'):
            yield InstallationTarget(sandbox)

        host_libname = sandbox.get('HOST_LIBRARY_NAME')
        libname = sandbox.get('LIBRARY_NAME')

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError('LIBRARY_NAME and '
                    'HOST_LIBRARY_NAME must have a different value', sandbox)
            lib = HostLibrary(sandbox, host_libname)
            self._libs[host_libname].append(lib)
            self._linkage.append((sandbox, lib, 'HOST_USE_LIBS'))

        final_lib = sandbox.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox['RELATIVEDIR'].replace('/', '_')

        static_lib = sandbox.get('FORCE_STATIC_LIB')
        shared_lib = sandbox.get('FORCE_SHARED_LIB')

        static_name = sandbox.get('STATIC_LIBRARY_NAME')
        shared_name = sandbox.get('SHARED_LIBRARY_NAME')

        is_framework = sandbox.get('IS_FRAMEWORK')
        is_component = sandbox.get('IS_COMPONENT')

        soname = sandbox.get('SONAME')

        shared_args = {}
        static_args = {}

        if final_lib:
            if isinstance(sandbox, MozbuildSandbox):
                if static_lib:
                    raise SandboxValidationError(
                        'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
                        'Please remove the latter.', sandbox)
            if shared_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
                    'Please remove one.', sandbox)
            if is_framework:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
                    'Please remove one.', sandbox)
            if is_component:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_COMPONENT. '
                    'Please remove one.', sandbox)
            static_args['link_into'] = final_lib
            static_lib = True

        if libname:
            if is_component:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', sandbox)
                if is_framework:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with IS_FRAMEWORK. '
                        'Please remove one.', sandbox)
                if static_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
                        'Please remove one.', sandbox)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', sandbox)
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', sandbox)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
                        sandbox)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
                        sandbox)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', sandbox)
                shared_args['soname'] = soname

            if not static_lib and not shared_lib:
                static_lib = True

            # If both a shared and a static library are created, only the
            # shared library is meant to be an SDK library.
            if sandbox.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are '
                        'True, but neither STATIC_LIBRARY_NAME nor '
                        'SHARED_LIBRARY_NAME is set. At least one is '
                        'required.', sandbox)
                if static_name and not shared_name and \
                        static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are '
                        'True, but STATIC_LIBRARY_NAME is the same as '
                        'LIBRARY_NAME, and SHARED_LIBRARY_NAME is unset. '
                        'Please either change STATIC_LIBRARY_NAME or '
                        'LIBRARY_NAME, or set SHARED_LIBRARY_NAME.', sandbox)
                if shared_name and not static_name and \
                        shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are '
                        'True, but SHARED_LIBRARY_NAME is the same as '
                        'LIBRARY_NAME, and STATIC_LIBRARY_NAME is unset. '
                        'Please either change SHARED_LIBRARY_NAME or '
                        'LIBRARY_NAME, or set STATIC_LIBRARY_NAME.', sandbox)
                if shared_name and static_name and \
                        shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are '
                        'True, but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        sandbox)

            if shared_lib:
                lib = SharedLibrary(sandbox, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((sandbox, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(sandbox, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((sandbox, lib, 'USE_LIBS'))

        # While there are multiple test manifests, the behavior is very
        # similar across them. We enforce this by having common handling of
        # all manifests and outputting a single class type with the
        # differences described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how
        # these manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, active)
        #
        # flavor identifies the flavor of this test.
        # install_root and install_subdir are joined to form the path prefix
        #     of where to install the files in the tests directory.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every
        # test harness can yet deal with test filtering. Once all harnesses
        # can do this, this feature can be dropped.
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest',
                            'browser', True),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro',
                          True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome',
                                       'testing/mochitest',
                                       'webapprtChrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.',
                                    True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in sandbox.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(sandbox, flavor,
                                                          path):
                    yield obj

        jar_manifests = sandbox.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError('While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', sandbox)

        for path in jar_manifests:
            yield JARManifest(sandbox, mozpath.join(sandbox['SRCDIR'], path))

        # Temporary test to look for jar.mn files that creep in without
        # using the new declaration. Before, we didn't require jar.mn files
        # to be declared anywhere (they were discovered). This will detect
        # people relying on the old behavior.
        if os.path.exists(os.path.join(sandbox['SRCDIR'], 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError('A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', sandbox)

        for name, jar in sandbox.get('JAVA_JAR_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, jar)

        for name, data in sandbox.get('ANDROID_ECLIPSE_PROJECT_TARGETS',
                                      {}).items():
            yield SandboxWrapped(sandbox, data)

        if passthru.variables:
            yield passthru

    def _create_substitution(self, cls, sandbox, path):
        if os.path.isabs(path):
            path = path[1:]

        sub = cls(sandbox)
        sub.input_path = mozpath.join(sandbox['SRCDIR'], '%s.in' % path)
        sub.output_path = mozpath.join(sandbox['OBJDIR'], path)
        sub.relpath = path

        return sub

    def _process_test_manifest(self, sandbox, info, manifest_path):
        flavor, install_root, install_subdir, filter_inactive = info

        manifest_path = mozpath.normpath(manifest_path)
        path = mozpath.normpath(mozpath.join(sandbox['SRCDIR'],
                                             manifest_path))
        manifest_dir = mozpath.dirname(path)
        manifest_reldir = mozpath.dirname(mozpath.relpath(
            path, sandbox['TOPSRCDIR']))
        install_prefix = mozpath.join(install_root, install_subdir)

        try:
            m = manifestparser.TestManifest(manifests=[path], strict=True)
            defaults = m.manifest_defaults[os.path.normpath(path)]
            if not m.tests and 'support-files' not in defaults:
                raise SandboxValidationError('Empty test manifest: %s'
                                             % path, sandbox)

            obj = TestManifest(sandbox, path, m, flavor=flavor,
                install_prefix=install_prefix,
                relpath=mozpath.join(manifest_reldir,
                                     mozpath.basename(path)),
                dupe_manifest='dupe-manifest' in defaults)

            filtered = m.tests

            if filter_inactive:
                # We return tests that don't exist because we want manifests
                # defining tests that don't exist to result in error.
                filtered = m.active_tests(exists=False, disabled=True,
                                          **self.info)

                missing = [t['name'] for t in filtered
                           if not os.path.exists(t['path'])]
                if missing:
                    raise SandboxValidationError('Test manifest (%s) lists '
                        'test that does not exist: %s' % (
                        path, ', '.join(missing)), sandbox)

            out_dir = mozpath.join(install_prefix, manifest_reldir)
            if 'install-to-subdir' in defaults:
                # This is terrible, but what are you going to do?
                out_dir = mozpath.join(out_dir,
                                       defaults['install-to-subdir'])
                obj.manifest_obj_relpath = mozpath.join(
                    manifest_reldir,
                    defaults['install-to-subdir'],
                    mozpath.basename(path))

            # "head" and "tail" lists.
            # All manifests support support-files.
            #
            # Keep a set of already seen support file patterns, because
            # repeatedly processing the patterns from the default section
            # for every test is quite costly (see bug 922517).
            extras = (('head', set()),
                      ('tail', set()),
                      ('support-files', set()))

            def process_support_files(test):
                for thing, seen in extras:
                    value = test.get(thing, '')
                    if value in seen:
                        continue
                    seen.add(value)
                    for pattern in value.split():
                        # We only support globbing on support-files because
                        # the harness doesn't support * for head and tail.
                        if '*' in pattern and thing == 'support-files':
                            obj.pattern_installs.append(
                                (manifest_dir, pattern, out_dir))
                        # "absolute" paths identify files that are to be
                        # placed in the install_root directory (no globs)
                        elif pattern[0] == '/':
                            full = mozpath.normpath(mozpath.join(
                                manifest_dir, mozpath.basename(pattern)))
                            obj.installs[full] = (mozpath.join(
                                install_root, pattern[1:]), False)
                        else:
                            full = mozpath.normpath(mozpath.join(
                                manifest_dir, pattern))

                            dest_path = mozpath.join(out_dir, pattern)

                            # If the path resolves to a different directory
                            # tree, we take special behavior depending on
                            # the entry type.
                            if not full.startswith(manifest_dir):
                                # If it's a support file, we install the
                                # file into the current destination
                                # directory. This implementation makes
                                # installing things with custom prefixes
                                # impossible. If this is needed, we can add
                                # support for that via a special syntax
                                # later.
                                if thing == 'support-files':
                                    dest_path = mozpath.join(
                                        out_dir, os.path.basename(pattern))
                                # If it's not a support file, we ignore it.
                                # This preserves old behavior so things like
                                # head files don't get installed multiple
                                # times.
                                else:
                                    continue

                            obj.installs[full] = \
                                (mozpath.normpath(dest_path), False)

            for test in filtered:
                obj.tests.append(test)

                obj.installs[mozpath.normpath(test['path'])] = \
                    (mozpath.join(out_dir, test['relpath']), True)

                process_support_files(test)

            if not filtered:
                # If there are no tests, look for support-files under
                # DEFAULT.
                process_support_files(defaults)

            # We also copy manifests into the output directory,
            # including manifests from [include:foo] directives.
            for mpath in m.manifests():
                mpath = mozpath.normpath(mpath)
                out_path = mozpath.join(out_dir, mozpath.basename(mpath))
                obj.installs[mpath] = (out_path, False)

            # Some manifests reference files that are auto generated as
            # part of the build or shouldn't be installed for some
            # reason. Here, we prune those files from the install set.
            # FUTURE we should be able to detect autogenerated files from
            # other build metadata. Once we do that, we can get rid of this.
            for f in defaults.get('generated-files', '').split():
                # We re-raise otherwise the stack trace isn't informative.
                try:
                    del obj.installs[mozpath.join(manifest_dir, f)]
                except KeyError:
                    raise SandboxValidationError('Error processing test '
                        'manifest %s: entry in generated-files not present '
                        'elsewhere in manifest: %s' % (path, f), sandbox)

                obj.external_installs.add(mozpath.join(out_dir, f))

            yield obj
        except (AssertionError, Exception):
            raise SandboxValidationError('Error processing test '
                'manifest file %s: %s' % (path,
                '\n'.join(traceback.format_exception(*sys.exc_info()))),
                sandbox)

    def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
        manifest_path = mozpath.normpath(manifest_path)
        manifest_full_path = mozpath.normpath(mozpath.join(
            sandbox['SRCDIR'], manifest_path))
        manifest_reldir = mozpath.dirname(mozpath.relpath(
            manifest_full_path, sandbox['TOPSRCDIR']))

        manifest = reftest.ReftestManifest()
        manifest.load(manifest_full_path)

        # reftest manifests don't come from manifest parser. But they are
        # similar enough that we can use the same emitted objects. Note
        # that we don't perform any installs for reftests.
        obj = TestManifest(sandbox, manifest_full_path, manifest,
                flavor=flavor, install_prefix='%s/' % flavor,
                relpath=mozpath.join(manifest_reldir,
                                     mozpath.basename(manifest_path)))

        for test in sorted(manifest.files):
            obj.tests.append({
                'path': test,
                'here': mozpath.dirname(test),
                'manifest': manifest_full_path,
                'name': mozpath.basename(test),
                'head': '',
                'tail': '',
                'support-files': '',
                'subsuite': '',
            })

        yield obj

    def _emit_directory_traversal_from_sandbox(self, sandbox):
        o = DirectoryTraversal(sandbox)
        o.dirs = sandbox.get('DIRS', [])
        o.test_dirs = sandbox.get('TEST_DIRS', [])
        o.affected_tiers = sandbox.get_affected_tiers()

        # Some paths have a subconfigure, yet also have a moz.build. Those
        # shouldn't end up in self._external_paths.
        self._external_paths -= {o.relobjdir}

        if 'TIERS' in sandbox:
            for tier in sandbox['TIERS']:
                o.tier_dirs[tier] = sandbox['TIERS'][tier]['regular'] + \
                    sandbox['TIERS'][tier]['external']

        yield o
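# A hedged usage sketch for TreeMetadataEmitter, following its emit()
# docstring ("The return value from BuildReader.read_topsrcdir() ... is
# typically fed into this function"). BuildReader construction and the
# consumption step are simplified placeholders.
reader = BuildReader(config)
emitter = TreeMetadataEmitter(config)
for obj in emitter.emit(reader.read_topsrcdir()):
    # Each emitted object is one of the data.py classes (XPIDLFile,
    # TestManifest, SharedLibrary, ...); a build backend would consume and
    # acknowledge it here, since emit() raises on unacknowledged objects.
    handle(obj)  # placeholder for backend consumption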
class TupBackend(CommonBackend):
    """Backend that generates Tupfiles for the tup build system."""

    def _init(self):
        CommonBackend._init(self)

        self._backend_files = {}
        self._cmd = MozbuildObject.from_environment()
        self._manifest_entries = OrderedDefaultDict(set)
        self._compile_env_gen_files = (
            '*.c',
            '*.cpp',
            '*.h',
            '*.inc',
            '*.py',
            '*.rs',
        )

        # These are 'group' dependencies - All rules that list these as an
        # output will be built before any rules that list this as an input.
        self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>'
        self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>'
        # The preprocessor including source-repo.h and buildid.h creates
        # dependencies that aren't specified by moz.build and cause errors
        # in Tup. Express these as a group dependency.
        self._early_generated_files = \
            '$(MOZ_OBJ_ROOT)/<early-generated-files>'

        self._built_in_addons = set()
        self._built_in_addons_file = \
            'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json'

    def _get_mozconfig_env(self, config):
        env = {}
        loader = MozconfigLoader(config.topsrcdir)
        mozconfig = loader.read_mozconfig(config.substs['MOZCONFIG'])
        make_extra = mozconfig['make_extra'] or []
        for line in make_extra:
            if line.startswith('export '):
                line = line[len('export '):]
            # Split on the first '=' only, so values containing '=' survive.
            key, value = line.split('=', 1)
            env[key] = value
        return env

    def build(self, config, output, jobs, verbose, what=None):
        if not what:
            what = [self.environment.topobjdir]
        args = [self.environment.substs['TUP'], 'upd'] + what
        if self.environment.substs.get('MOZ_AUTOMATION'):
            args += ['--quiet']
        if verbose:
            args += ['--verbose']
        if jobs > 0:
            args += ['-j%d' % jobs]
        else:
            args += ['-j%d' % multiprocessing.cpu_count()]
        return config.run_process(
            args=args,
            line_handler=output.on_line,
            ensure_exit_code=False,
            append_env=self._get_mozconfig_env(config))

    def _get_backend_file(self, relobjdir):
        objdir = mozpath.normpath(
            mozpath.join(self.environment.topobjdir, relobjdir))
        if objdir not in self._backend_files:
            self._backend_files[objdir] = \
                BackendTupfile(objdir, self.environment,
                               self.environment.topsrcdir,
                               self.environment.topobjdir, self.dry_run)
        return self._backend_files[objdir]

    def _get_backend_file_for(self, obj):
        return self._get_backend_file(obj.relobjdir)

    def _py_action(self, action):
        cmd = [
            '$(PYTHON)',
            '-m',
            'mozbuild.action.%s' % action,
        ]
        return cmd

    def _lib_paths(self, objdir, libs):
        return [mozpath.relpath(mozpath.join(l.objdir, l.import_name),
                                objdir)
                for l in libs]

    def _gen_shared_library(self, backend_file):
        shlib = backend_file.shared_lib
        if shlib.name == 'libxul.so':
            # This will fail to link currently due to missing rust symbols.
            return

        if shlib.cxx_link:
            mkshlib = ([backend_file.environment.substs['CXX']] +
                       backend_file.local_flags['CXX_LDFLAGS'])
        else:
            mkshlib = ([backend_file.environment.substs['CC']] +
                       backend_file.local_flags['C_LDFLAGS'])

        mkshlib += (backend_file.environment.substs['DSO_PIC_CFLAGS'] +
                    [backend_file.environment.substs['DSO_LDOPTS']] +
                    ['-Wl,-h,%s' % shlib.soname] +
                    ['-o', shlib.lib_name])

        objs, _, shared_libs, os_libs, static_libs = self._expand_libs(shlib)
        static_libs = self._lib_paths(backend_file.objdir, static_libs)
        shared_libs = self._lib_paths(backend_file.objdir, shared_libs)

        list_file_name = '%s.list' % shlib.name.replace('.', '_')
        list_file = self._make_list_file(backend_file.objdir, objs,
                                         list_file_name)

        inputs = objs + static_libs + shared_libs
        if any(i.endswith('libxul.so') for i in inputs):
            # Don't attempt to link anything that depends on libxul.
            return

        symbols_file = []
        if shlib.symbols_file:
            inputs.append(shlib.symbols_file)
            # TODO: Assumes GNU LD
            symbols_file = ['-Wl,--version-script,%s' % shlib.symbols_file]

        cmd = (mkshlib +
               [list_file] +
               backend_file.local_flags['LDFLAGS'] +
               static_libs +
               shared_libs +
               symbols_file +
               [backend_file.environment.substs['OS_LIBS']] +
               os_libs)
        backend_file.rule(
            cmd=cmd,
            inputs=inputs,
            outputs=[shlib.lib_name],
            display='LINK %o'
        )
        backend_file.symlink_rule(
            mozpath.join(backend_file.objdir, shlib.lib_name),
            output=mozpath.join(self.environment.topobjdir,
                                shlib.install_target, shlib.lib_name))

    def _gen_program(self, backend_file):
        cc_or_cxx = 'CXX' if backend_file.program.cxx_link else 'CC'
        objs, _, shared_libs, os_libs, static_libs = self._expand_libs(
            backend_file.program)
        static_libs = self._lib_paths(backend_file.objdir, static_libs)
        shared_libs = self._lib_paths(backend_file.objdir, shared_libs)

        inputs = objs + static_libs + shared_libs
        if any(i.endswith('libxul.so') for i in inputs):
            # Don't attempt to link anything that depends on libxul.
            return

        list_file_name = '%s.list' % backend_file.program.name.replace(
            '.', '_')
        list_file = self._make_list_file(backend_file.objdir, objs,
                                         list_file_name)

        outputs = [mozpath.relpath(
            backend_file.program.output_path.full_path,
            backend_file.objdir)]
        cmd = ([backend_file.environment.substs[cc_or_cxx], '-o', '%o'] +
               backend_file.local_flags['CXX_LDFLAGS'] +
               [list_file] +
               backend_file.local_flags['LDFLAGS'] +
               static_libs +
               [backend_file.environment.substs['MOZ_PROGRAM_LDFLAGS']] +
               shared_libs +
               [backend_file.environment.substs['OS_LIBS']] +
               os_libs)
        backend_file.rule(
            cmd=cmd,
            inputs=inputs,
            outputs=outputs,
            display='LINK %o'
        )

    def _gen_static_library(self, backend_file):
        ar = [
            backend_file.environment.substs['AR'],
            backend_file.environment.substs['AR_FLAGS'].replace('$@', '%o')
        ]
        objs, _, shared_libs, _, static_libs = self._expand_libs(
            backend_file.static_lib)
        static_libs = self._lib_paths(backend_file.objdir, static_libs)
        shared_libs = self._lib_paths(backend_file.objdir, shared_libs)

        inputs = objs + static_libs

        cmd = (ar + inputs)
        backend_file.rule(
            cmd=cmd,
            inputs=inputs,
            outputs=[backend_file.static_lib.name],
            display='AR %o'
        )

    def consume_object(self, obj):
        """Write out build files necessary to build with tup."""
        if not isinstance(obj, ContextDerived):
            return False

        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            skip_files = []
            if self.environment.is_artifact_build:
                skip_files = self._compile_env_gen_files
            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    return False
            if backend_file.requires_delay(obj.inputs):
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry) and
              obj.install_target.startswith('dist/bin')):
            top_level = mozpath.join(obj.install_target, 'chrome.manifest')
            if obj.path != top_level:
                entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                        obj.install_target)
                self._manifest_entries[top_level].add(entry)
            self._manifest_entries[obj.path].add(str(obj.entry))
        elif isinstance(obj, Defines):
            self._process_defines(backend_file, obj)
        elif isinstance(obj, HostDefines):
            self._process_defines(backend_file, obj, host=True)
        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj)
        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(obj, backend_file)
        elif isinstance(obj, JARManifest):
            self._consume_jar_manifest(obj)
        elif isinstance(obj, PerSourceFlag):
            backend_file.per_source_flags[obj.file_name].extend(obj.flags)
        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)
        elif isinstance(obj, (Sources, GeneratedSources)):
            backend_file.sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, HostSources):
            backend_file.host_sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, VariablePassthru):
            backend_file.variables = obj.variables
        elif isinstance(obj, StaticLibrary):
            backend_file.static_lib = obj
        elif isinstance(obj, SharedLibrary):
            backend_file.shared_lib = obj
        elif isinstance(obj, HostProgram):
            pass
        elif isinstance(obj, Program):
            backend_file.program = obj
        elif isinstance(obj, DirectoryTraversal):
            pass

        return True

    def consume_finished(self):
        CommonBackend.consume_finished(self)

        # The approach here is similar to fastermake.py, but we
        # simply write out the resulting files here.
        for target, entries in self._manifest_entries.iteritems():
            with self._write_file(mozpath.join(self.environment.topobjdir,
                                               target)) as fh:
                fh.write(''.join('%s\n' % e for e in sorted(entries)))

        if self._built_in_addons:
            with self._write_file(
                    mozpath.join(self.environment.topobjdir,
                                 self._built_in_addons_file)) as fh:
                json.dump({'system': sorted(list(self._built_in_addons))},
                          fh)

        for objdir, backend_file in sorted(self._backend_files.items()):
            backend_file.gen_sources_rules([self._installed_files])
            for condition, gen_method in (
                    (backend_file.shared_lib, self._gen_shared_library),
                    (backend_file.static_lib and
                     backend_file.static_lib.no_expand_lib,
                     self._gen_static_library),
                    (backend_file.program, self._gen_program)):
                if condition:
                    backend_file.export_shell()
                    gen_method(backend_file)
            for obj in backend_file.delayed_generated_files:
                self._process_generated_file(backend_file, obj)
            for path, output, output_group in \
                    backend_file.delayed_installed_files:
                backend_file.symlink_rule(path, output=output,
                                          output_group=output_group)
            with self._write_file(fh=backend_file):
                pass

        with self._write_file(mozpath.join(self.environment.topobjdir,
                                           'Tuprules.tup')) as fh:
            acdefines_flags = ' '.join(
                '-D%s=%s' % (name, shell_quote(value))
                for (name, value) in
                sorted(self.environment.acdefines.iteritems()))
            # TODO: AB_CD only exists in Makefiles at the moment.
            acdefines_flags += ' -DAB_CD=en-US'

            # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and
            # is only defined in browser/locales/Makefile.in
            acdefines_flags += (
                ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile'
                % self.environment.topsrcdir)

            # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in
            # toolkit/content/buildconfig.html
            acdefines_flags += ' -DBUILD_FASTER=1'

            fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
            fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
            fh.write('ACDEFINES = %s\n' % acdefines_flags)
            fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (
                os.path.relpath(self.environment.topsrcdir,
                                self.environment.topobjdir)))
            fh.write('PYTHON = PYTHONDONTWRITEBYTECODE=1 %s\n' %
                     self.environment.substs['PYTHON'])
            fh.write('PYTHON_PATH = $(PYTHON) '
                     '$(topsrcdir)/config/pythonpath.py\n')
            fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
            fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
            fh.write('IDL_PARSER_CACHE_DIR = '
                     '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n')

        # Run 'tup init' if necessary.
if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init']) def _process_generated_file(self, backend_file, obj): # TODO: These are directories that don't work in the tup backend # yet, because things they depend on aren't built yet. skip_directories = ( 'layout/style/test', # HostSimplePrograms 'toolkit/library', # libxul.so ) if obj.script and obj.method and obj.relobjdir not in skip_directories: backend_file.export_shell() cmd = self._py_action('file_generate') if obj.localized: cmd.append('--locale=en-US') cmd.extend([ obj.script, obj.method, obj.outputs[0], '%s.pp' % obj.outputs[0], # deps file required ]) full_inputs = [f.full_path for f in obj.inputs] cmd.extend(full_inputs) cmd.extend(shell_quote(f) for f in obj.flags) outputs = [] outputs.extend(obj.outputs) outputs.append('%s.pp' % obj.outputs[0]) extra_exports = { 'buildid.h': ['MOZ_BUILD_DATE'], } for f in obj.outputs: exports = extra_exports.get(f) if exports: backend_file.export(exports) if any(f in obj.outputs for f in ('source-repo.h', 'buildid.h')): extra_outputs = [self._early_generated_files] else: extra_outputs = [self._installed_files ] if obj.required_for_compile else [] full_inputs += [self._early_generated_files] backend_file.rule( display='python {script}:{method} -> [%o]'.format( script=obj.script, method=obj.method), cmd=cmd, inputs=full_inputs, outputs=outputs, extra_outputs=extra_outputs, ) def _process_defines(self, backend_file, obj, host=False): defines = list(obj.get_defines()) if defines: if host: backend_file.host_defines = defines else: backend_file.defines = defines def _add_features(self, target, path): path_parts = mozpath.split(path) if all([ target == 'dist/bin/browser', path_parts[0] == 'features', len(path_parts) > 1 ]): self._built_in_addons.add(path_parts[1]) def _process_final_target_files(self, obj): target = obj.install_target if not isinstance(obj, ObjdirFiles): path = mozpath.basedir(target, ( 'dist/bin', 'dist/xpi-stage', '_tests', 'dist/include', 'dist/sdk', )) if not path: raise Exception("Cannot install to " + target) if target.startswith('_tests'): # TODO: TEST_HARNESS_FILES present a few challenges for the tup # backend (bug 1372381). return for path, files in obj.files.walk(): self._add_features(target, path) for f in files: if not isinstance(f, ObjDirPath): backend_file = self._get_backend_file( mozpath.join(target, path)) if '*' in f: if f.startswith('/') or isinstance(f, AbsolutePath): basepath, wild = os.path.split(f.full_path) if '*' in basepath: raise Exception( "Wildcards are only supported in the filename part of " "srcdir-relative or absolute paths.") # TODO: This is only needed for Windows, so we can # skip this for now. pass else: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) self.backend_input_files.add(prefix) finder = FileFinder(prefix) for p, _ in finder.find(f.full_path[len(prefix):]): backend_file.symlink_rule( mozpath.join(prefix, p), output=mozpath.join(f.target_basename, p), output_group=self._installed_files) else: backend_file.symlink_rule( f.full_path, output=f.target_basename, output_group=self._installed_files) else: if (self.environment.is_artifact_build and any( mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)): # If we have an artifact build we never would have generated this file, # so do not attempt to install it. 
continue # We're not generating files in these directories yet, so # don't attempt to install files generated from them. if f.context.relobjdir not in ('layout/style/test', 'toolkit/library', 'js/src/shell'): output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path, f.target_basename) gen_backend_file = self._get_backend_file( f.context.relobjdir) if gen_backend_file.requires_delay([f]): output_group = self._installed_files if f.target_basename.endswith( '.h') else None gen_backend_file.delayed_installed_files.append( (f.full_path, output, output_group)) else: gen_backend_file.symlink_rule( f.full_path, output=output, output_group=self._installed_files) def _process_final_target_pp_files(self, obj, backend_file): for i, (path, files) in enumerate(obj.files.walk()): self._add_features(obj.install_target, path) for f in files: self._preprocess(backend_file, f.full_path, destdir=mozpath.join( self.environment.topobjdir, obj.install_target, path), target=f.target_basename) def _process_computed_flags(self, obj, backend_file): for var, flags in obj.get_flags(): backend_file.local_flags[var] = flags def _process_unified_sources(self, obj): backend_file = self._get_backend_file_for(obj) files = [f[0] for f in obj.unified_source_mapping] backend_file.sources[obj.canonical_suffix].extend(files) def _handle_idl_manager(self, manager): if self.environment.is_artifact_build: return dist_idl_backend_file = self._get_backend_file('dist/idl') for idl in manager.idls.values(): dist_idl_backend_file.symlink_rule( idl['source'], output_group=self._installed_idls) backend_file = self._get_backend_file('xpcom/xpidl') backend_file.export_shell() all_xpts = [] for module, data in sorted(manager.modules.iteritems()): _, idls = data cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '-I$(IDL_PARSER_DIR)', '-I$(IDL_PARSER_CACHE_DIR)', '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py', '--cache-dir', '$(IDL_PARSER_CACHE_DIR)', '--bindings-conf', '$(topsrcdir)/dom/bindings/Bindings.conf', '$(DIST)/idl', '$(DIST)/include', '$(DIST)/xpcrs', '.', module, ] cmd.extend(sorted(idls)) all_xpts.append('$(MOZ_OBJ_ROOT)/%s/%s.xpt' % (backend_file.relobjdir, module)) outputs = ['%s.xpt' % module] outputs.extend([ '$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in sorted(idls) ]) outputs.extend([ '$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in sorted(idls) ]) outputs.extend([ '$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in sorted(idls) ]) backend_file.rule( inputs=[ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py', '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py', self._installed_idls, ], display='XPIDL %s' % module, cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], ) cpp_backend_file = self._get_backend_file('xpcom/reflect/xptinfo') cpp_backend_file.export_shell() cpp_backend_file.rule( inputs=all_xpts, display='XPIDL xptcodegen.py %o', cmd=[ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '$(topsrcdir)/xpcom/reflect/xptinfo/xptcodegen.py', '%o', '%f', ], outputs=['xptdata.cpp'], ) def _preprocess(self, backend_file, input_file, destdir=None, target=None): if target is None: target = mozpath.basename(input_file) # .css files use '%' as the preprocessor marker, which must be scaped as # '%%' in the Tupfile. 
marker = '%%' if target.endswith('.css') else '#' cmd = self._py_action('preprocessor') cmd.extend([shell_quote(d) for d in backend_file.defines]) cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker]) base_input = mozpath.basename(target) if base_input.endswith('.in'): base_input = mozpath.splitext(base_input)[0] output = mozpath.join(destdir, base_input) if destdir else base_input backend_file.rule( inputs=[input_file], extra_inputs=[self._early_generated_files], display='Preprocess %o', cmd=cmd, outputs=[output], ) def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources, sorted_nonstatic_ipdl_sources, sorted_static_ipdl_sources, unified_ipdl_cppsrcs_mapping): # Preferably we wouldn't have to import ipdl, but we need to parse the # ast in order to determine the namespaces since they are used in the # header output paths. sys.path.append(mozpath.join(self.environment.topsrcdir, 'ipc', 'ipdl')) import ipdl backend_file = self._get_backend_file('ipc/ipdl') outheaderdir = '_ipdlheaders' srcdir = mozpath.join(self.environment.topsrcdir, 'ipc/ipdl') cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '%s/ipdl.py' % srcdir, '--sync-msg-list=%s/sync-messages.ini' % srcdir, '--msg-metadata=%s/message-metadata.ini' % srcdir, '--outheaders-dir=%s' % outheaderdir, '--outcpp-dir=.', ] ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources)) cmd.extend(['-I%s' % d for d in ipdldirs]) cmd.extend(sorted_ipdl_sources) outputs = [ 'IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py' ] for filename in sorted_ipdl_sources: filepath, ext = os.path.splitext(filename) dirname, basename = os.path.split(filepath) dirname = mozpath.relpath(dirname, self.environment.topsrcdir) extensions = [''] if ext == '.ipdl': extensions.extend(['Child', 'Parent']) with open(filename) as f: ast = ipdl.parse(f.read(), filename, includedirs=ipdldirs) self.backend_input_files.add(filename) headerdir = os.path.join(outheaderdir, *([ns.name for ns in ast.namespaces])) for extension in extensions: outputs.append("%s%s.cpp" % (basename, extension)) outputs.append( mozpath.join(headerdir, '%s%s.h' % (basename, extension))) backend_file.rule( display='IPDL code generation', cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend( u[0] for u in unified_ipdl_cppsrcs_mapping) def _handle_webidl_build(self, bindings_dir, unified_source_mapping, webidls, expected_build_output_files, global_define_files): backend_file = self._get_backend_file('dom/bindings') backend_file.export_shell() for source in sorted(webidls.all_preprocessed_sources()): self._preprocess(backend_file, source) cmd = self._py_action('webidl') cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings')) # The WebIDLCodegenManager knows all of the .cpp and .h files that will # be created (expected_build_output_files), but there are a few # additional files that are also created by the webidl py_action. 
outputs = [ '_cache/webidlyacc.py', 'codegen.json', 'codegen.pp', 'parser.out', ] outputs.extend(expected_build_output_files) backend_file.rule( display='WebIDL code generation', cmd=cmd, inputs=webidls.all_non_static_basenames(), outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_source_mapping) backend_file.sources['.cpp'].extend(sorted(global_define_files)) test_backend_file = self._get_backend_file('dom/bindings/test') test_backend_file.sources['.cpp'].extend( sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
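# _make_list_file is referenced by the link rules above but defined elsewhere
# in this backend. A hedged sketch of the idea: very long linker command
# lines are avoided by writing the object files into a response file and
# passing a short reference to it instead. The style names below mirror
# EXPAND_LIBS_LIST_STYLE values used by the build system; treat the exact
# set, and this helper's name and signature, as assumptions.
import os
import tempfile

def make_list_file(objdir, objs, name, list_style='list'):
    path = os.path.join(objdir, name)
    rel_objs = [os.path.relpath(o, objdir) for o in objs]
    if list_style == 'linkerscript':
        ref = path                                  # GNU ld linker script
        content = '\n'.join('INPUT("%s")' % o for o in rel_objs)
    elif list_style == 'list':
        ref = '@' + path                            # @file response syntax
        content = '\n'.join(rel_objs)
    else:
        raise ValueError('unknown list style: %s' % list_style)
    with open(path, 'w') as fh:
        fh.write(content)
    return ref

objdir = tempfile.mkdtemp()
ref = make_list_file(objdir, [os.path.join(objdir, 'a.o'),
                              os.path.join(objdir, 'b.o')], 'libfoo_so.list')
assert ref == '@' + os.path.join(objdir, 'libfoo_so.list')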
    def _reset_state(self):
        self._tests_by_path = OrderedDefaultDict(list)
        self._tests_by_flavor = defaultdict(set)
        self._test_dirs = set()
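# OrderedDefaultDict, used throughout these backends (from mozbuild.util),
# combines insertion ordering with a defaultdict-style factory. A minimal
# sketch of that behavior for reference; this is an illustration, not the
# actual mozbuild implementation.
from collections import OrderedDict

class OrderedDefaultDictSketch(OrderedDict):
    def __init__(self, default_factory, *args, **kwargs):
        super(OrderedDefaultDictSketch, self).__init__(*args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Created on first access, like collections.defaultdict, but the
        # insertion order of keys is preserved.
        value = self[key] = self._default_factory()
        return value

d = OrderedDefaultDictSketch(list)
d['b'].append(1)
d['a'].append(2)
assert list(d.keys()) == ['b', 'a']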
class TestMetadata(object): """Holds information about tests. This class provides an API to query tests active in the build configuration. """ def __init__(self, all_tests, test_defaults=None): self._tests_by_path = OrderedDefaultDict(list) self._tests_by_flavor = defaultdict(set) self._test_dirs = set() with open(all_tests, 'rb') as fh: test_data = pickle.load(fh) defaults = None if test_defaults: with open(test_defaults, 'rb') as fh: defaults = pickle.load(fh) for path, tests in test_data.items(): for metadata in tests: if defaults: defaults_manifests = [metadata['manifest']] ancestor_manifest = metadata.get('ancestor-manifest') if ancestor_manifest: defaults_manifests.append(ancestor_manifest) for manifest in defaults_manifests: manifest_defaults = defaults.get(manifest) if manifest_defaults: metadata = manifestparser.combine_fields(manifest_defaults, metadata) self._tests_by_path[path].append(metadata) self._test_dirs.add(os.path.dirname(path)) flavor = metadata.get('flavor') self._tests_by_flavor[flavor].add(path) def tests_with_flavor(self, flavor): """Obtain all tests having the specified flavor. This is a generator of dicts describing each test. """ for path in sorted(self._tests_by_flavor.get(flavor, [])): yield self._tests_by_path[path] def resolve_tests(self, paths=None, flavor=None, subsuite=None, under_path=None, tags=None): """Resolve tests from an identifier. This is a generator of dicts describing each test. ``paths`` can be an iterable of values to use to identify tests to run. If an entry is a known test file, tests associated with that file are returned (there may be multiple configurations for a single file). If an entry is a directory, or a prefix of a directory containing tests, all tests in that directory are returned. If the string appears in a known test file, that test file is considered. If the path contains a wildcard pattern, tests matching that pattern are returned. If ``under_path`` is a string, it will be used to filter out tests that aren't in the specified path prefix relative to topsrcdir or the test's installed dir. If ``flavor`` is a string, it will be used to filter returned tests to only be the flavor specified. A flavor is something like ``xpcshell``. If ``subsuite`` is a string, it will be used to filter returned tests to only be in the subsuite specified. If ``tags`` are specified, they will be used to filter returned tests to only those with a matching tag. """ if tags: tags = set(tags) def fltr(tests): for test in tests: if flavor: if flavor == 'devtools' and test.get('flavor') != 'browser-chrome': continue if flavor != 'devtools' and test.get('flavor') != flavor: continue if subsuite and test.get('subsuite') != subsuite: continue if tags and not (tags & set(test.get('tags', '').split())): continue if under_path and not test['file_relpath'].startswith(under_path): continue # Make a copy so modifications don't change the source. yield dict(test) paths = paths or [] paths = [mozpath.normpath(p) for p in paths] if not paths: paths = [None] candidate_paths = set() for path in sorted(paths): if path is None: candidate_paths |= set(self._tests_by_path.keys()) continue if '*' in path: candidate_paths |= {p for p in self._tests_by_path if mozpath.match(p, path)} continue # If the path is a directory, or the path is a prefix of a directory # containing tests, pull in all tests in that directory. 
if (path in self._test_dirs or any(p.startswith(path) for p in self._tests_by_path)): candidate_paths |= {p for p in self._tests_by_path if p.startswith(path)} continue # If it's a test file, add just that file. candidate_paths |= {p for p in self._tests_by_path if path in p} for p in sorted(candidate_paths): tests = self._tests_by_path[p] for test in fltr(tests): yield test
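# A standalone sketch of the path-selection logic in resolve_tests above: an
# exact or substring match pulls in individual files, a directory prefix
# pulls in everything under it, and '*' patterns are matched with fnmatch
# (standing in here for mozpath.match, whose semantics differ slightly).
# The test data below is illustrative only.
import fnmatch

def candidate_paths(tests_by_path, test_dirs, path):
    if '*' in path:
        return {p for p in tests_by_path if fnmatch.fnmatch(p, path)}
    if path in test_dirs or any(p.startswith(path) for p in tests_by_path):
        return {p for p in tests_by_path if p.startswith(path)}
    return {p for p in tests_by_path if path in p}

tests = {'dom/tests/test_a.js': [], 'dom/tests/test_b.js': [],
         'layout/test_c.js': []}
dirs = {'dom/tests', 'layout'}
assert candidate_paths(tests, dirs, 'dom') == {'dom/tests/test_a.js',
                                               'dom/tests/test_b.js'}
assert candidate_paths(tests, dirs, '*_c.js') == {'layout/test_c.js'}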
class FasterMakeBackend(CommonBackend, PartialBackend): def _init(self): super(FasterMakeBackend, self)._init() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._has_xpidl = False def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] if target.endswith('.css'): kwargs['marker'] = '%' depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \ obj.install_target.startswith('dist/bin'): defines = obj.defines or {} if defines: defines = defines.defines for path, files in obj.files.walk(): for f in files: if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, f.full_path, path, target=f.target_basename, defines=defines) elif '*' in f: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) self._install_manifests[obj.install_target] \ .add_pattern_link( prefix, f.full_path[len(prefix):], mozpath.join(path, f.target_basename)) else: self._install_manifests[obj.install_target].add_link( f.full_path, mozpath.join(path, f.target_basename)) if isinstance(f, ObjDirPath): dep_target = 'install-%s' % obj.install_target self._dependencies[dep_target].append( mozpath.relpath(f.full_path, self.environment.topobjdir)) elif isinstance(obj, ChromeManifestEntry) and \ obj.install_target.startswith('dist/bin'): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath( obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, XPIDLFile): self._has_xpidl = True # We're not actually handling XPIDL files. return False else: return False return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) if not self._has_xpidl: mk.add_statement('NO_XPIDL = 1') # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): value = self.environment.substs.get(var) if value is not None: mk.add_statement('%s = %s' % (var, value)) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( ''.join('%s\n' % e for e in sorted(entries)), mozpath.relpath(target, install_target)) # Add information for install manifests. 
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we inferred:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        # For artifact builds only, write a single unified manifest for
        # consumption by |mach watch|.
        if self.environment.is_artifact_build:
            unified_manifest = InstallManifest()
            for base, install_manifest in self._install_manifests.iteritems():
                # Expect 'dist/bin/**', which includes 'dist/bin' with no
                # trailing slash.
                assert base.startswith('dist/bin')
                base = base[len('dist/bin'):]
                if base and base[0] == '/':
                    base = base[1:]
                unified_manifest.add_entries_from(install_manifest, base=base)

            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'unified_install_dist_bin')) as fh:
                unified_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
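# The rebasing step above strips the 'dist/bin' prefix so the per-base
# manifests can be merged into one unified manifest rooted at dist/bin. A
# sketch of just that prefix arithmetic, as an illustration:
def rebase_dist_bin(base):
    assert base.startswith('dist/bin')
    base = base[len('dist/bin'):]
    if base and base[0] == '/':
        base = base[1:]
    return base

assert rebase_dist_bin('dist/bin') == ''
assert rebase_dist_bin('dist/bin/browser') == 'browser'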
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._has_xpidl = False def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith(".in"): target = target[:-3] if target.endswith(".css"): kwargs["marker"] = "%" depfile = mozpath.join( self.environment.topobjdir, "faster", ".deps", mozpath.join(obj.install_target, dest, target).replace("/", "_"), ) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs ) def consume_object(self, obj): if isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"): self._consume_jar_manifest(obj) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and obj.install_target.startswith( "dist/bin" ): defines = obj.defines or {} if defines: defines = defines.defines for path, files in obj.files.walk(): for f in files: if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, f.full_path, path, target=f.target_basename, defines=defines) elif "*" in f: def _prefix(s): for p in mozpath.split(s): if "*" not in p: yield p + "/" prefix = "".join(_prefix(f.full_path)) self._install_manifests[obj.install_target].add_pattern_symlink( prefix, f.full_path[len(prefix) :], mozpath.join(path, f.target_basename) ) else: self._install_manifests[obj.install_target].add_symlink( f.full_path, mozpath.join(path, f.target_basename) ) if isinstance(f, ObjDirPath): dep_target = "install-%s" % obj.install_target self._dependencies[dep_target].append(mozpath.relpath(f.full_path, self.environment.topobjdir)) elif isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith("dist/bin"): top_level = mozpath.join(obj.install_target, "chrome.manifest") if obj.path != top_level: entry = "manifest %s" % mozpath.relpath(obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, XPIDLFile): self._has_xpidl = True # We currently ignore a lot of object types, so just acknowledge # everything. return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(["default"]) mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir) mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir) mk.add_statement("BACKEND = %s" % self._backend_output_list_file) if not self._has_xpidl: mk.add_statement("NO_XPIDL = 1") # Add a few necessary variables inherited from configure for var in ("PYTHON", "ACDEFINES", "MOZ_BUILD_APP", "MOZ_WIDGET_TOOLKIT"): mk.add_statement("%s = %s" % (var, self.environment.substs[var])) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( "".join("%s\n" % e for e in sorted(entries)), mozpath.relpath(target, install_target) ) # Add information for install manifests. 
        mk.add_statement("INSTALL_MANIFESTS = %s" % " ".join(self._install_manifests.keys()))

        # Add dependencies we inferred:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies("$(TOPOBJDIR)/%s" % d for d in deps)

        # Add backend dependencies:
        mk.create_rule([self._backend_output_list_file]).add_dependencies(self.backend_input_files)

        mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                mozpath.join(self.environment.topobjdir, "faster", "install_%s" % base.replace("/", "_"))
            ) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(mozpath.join(self.environment.topobjdir, "faster", "Makefile")) as fh:
            mk.dump(fh, removal_guard=False)
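# A sketch of the naming conventions used by _add_preprocess above: a
# trailing '.in' is dropped from preprocessed targets (as PP_TARGETS does in
# config/rules.mk), and the depfile name flattens the install path by
# replacing '/' with '_'. This is an illustrative helper, not backend API.
def preprocess_names(install_target, dest, source_name):
    target = source_name[:-3] if source_name.endswith('.in') else source_name
    depfile = 'faster/.deps/' + \
        '/'.join([install_target, dest, target]).replace('/', '_')
    return target, depfile

target, depfile = preprocess_names('dist/bin', 'browser',
                                   'application.ini.in')
assert target == 'application.ini'
assert depfile == 'faster/.deps/dist_bin_browser_application.ini'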
class FasterMakeBackend(MakeBackend, PartialBackend): def _init(self): super(FasterMakeBackend, self)._init() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._l10n_dependencies = OrderedDefaultDict(list) self._has_xpidl = False self._generated_files_map = {} self._generated_files = [] def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] if target.endswith('.css'): kwargs['marker'] = '%' depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \ obj.install_target.startswith('dist/bin'): ab_cd = self.environment.substs['MOZ_UI_LOCALE'][0] localized = isinstance( obj, (LocalizedFiles, LocalizedPreprocessedFiles)) defines = obj.defines or {} if defines: defines = defines.defines for path, files in obj.files.walk(): for f in files: # For localized files we need to find the file from the locale directory. if (localized and not isinstance(f, ObjDirPath) and ab_cd != 'en-US'): src = self.localized_path(obj.relsrcdir, f) dep_target = 'install-%s' % obj.install_target if '*' not in src: merge = mozpath.abspath( mozpath.join(self.environment.topobjdir, 'l10n_merge', obj.relsrcdir, f)) self._l10n_dependencies[dep_target].append( (merge, f.full_path, src)) src = merge else: src = f.full_path if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, src, path, target=f.target_basename, defines=defines) elif '*' in f: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(src)) if '*' in f.target_basename: target = path else: target = mozpath.join(path, f.target_basename) mozpath.join(path, f.target_basename) self._install_manifests[obj.install_target] \ .add_pattern_link( prefix, src[len(prefix):], target) else: self._install_manifests[obj.install_target].add_link( src, mozpath.join(path, f.target_basename)) if isinstance(f, ObjDirPath): dep_target = 'install-%s' % obj.install_target dep = mozpath.relpath(f.full_path, self.environment.topobjdir) if dep in self._generated_files_map: # Only the first output file is specified as a # dependency. If there are multiple output files # from a single GENERATED_FILES invocation that are # installed, we only want to run the command once. 
dep = self._generated_files_map[dep] self._dependencies[dep_target].append(dep) elif isinstance(obj, ChromeManifestEntry) and \ obj.install_target.startswith('dist/bin'): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath( obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, GeneratedFile): if obj.outputs: first_output = mozpath.relpath( mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir) for o in obj.outputs[1:]: fullpath = mozpath.join(obj.objdir, o) self._generated_files_map[mozpath.relpath( fullpath, self.environment.topobjdir)] = first_output self._generated_files.append(obj) return False elif isinstance(obj, XPIDLModule): self._has_xpidl = True # We're not actually handling XPIDL files. return False else: return False return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) mk.add_statement('MDDEPDIR = .deps') mk.add_statement('TOUCH ?= touch') mk.add_statement('include $(TOPSRCDIR)/config/makefiles/functions.mk') mk.add_statement('include $(TOPSRCDIR)/config/AB_rCD.mk') mk.add_statement('AB_CD = en-US') if not self._has_xpidl: mk.add_statement('NO_XPIDL = 1') # Add a few necessary variables inherited from configure for var in ( 'PYTHON3', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): value = self.environment.substs.get(var) if value is not None: mk.add_statement('%s = %s' % (var, value)) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in six.iteritems(self._manifest_entries): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( ''.join('%s\n' % e for e in sorted(entries)), mozpath.relpath(target, install_target)) # Add information for install manifests. mk.add_statement('INSTALL_MANIFESTS = %s' % ' '.join(sorted(self._install_manifests.keys()))) # Add dependencies we inferred: for target, deps in sorted(six.iteritems(self._dependencies)): mk.create_rule([target]).add_dependencies('$(TOPOBJDIR)/%s' % d for d in sorted(deps)) # This is not great, but it's better to have some dependencies on these Python files. python_deps = [ '$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py', '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py', '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py', ] # Add l10n dependencies we inferred: for target, deps in sorted(six.iteritems(self._l10n_dependencies)): mk.create_rule([target]).add_dependencies( '%s' % d[0] for d in sorted(deps, key=itemgetter(0))) for (merge, ref_file, l10n_file) in deps: rule = mk.create_rule([ merge ]).add_dependencies([ref_file, l10n_file] + python_deps) rule.add_commands([ '$(PYTHON3) -m mozbuild.action.l10n_merge ' '--output {} --ref-file {} --l10n-file {}'.format( merge, ref_file, l10n_file) ]) # Add a dummy rule for the l10n file since it might not exist. 
mk.create_rule([l10n_file]) mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk') for base, install_manifest in six.iteritems(self._install_manifests): with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'install_%s' % base.replace('/', '_'))) as fh: install_manifest.write(fileobj=fh) # For artifact builds only, write a single unified manifest # for consumption by |mach watch|. if self.environment.is_artifact_build: unified_manifest = InstallManifest() for base, install_manifest in six.iteritems( self._install_manifests): # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash. assert base.startswith('dist/bin') base = base[len('dist/bin'):] if base and base[0] == '/': base = base[1:] unified_manifest.add_entries_from(install_manifest, base=base) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'unified_install_dist_bin')) as fh: unified_manifest.write(fileobj=fh) for obj in self._generated_files: for stmt in self._format_statements_for_generated_file( obj, 'default'): mk.add_statement(stmt) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'Makefile')) as fh: mk.dump(fh, removal_guard=False) def _pretty_path(self, path, obj): if path.startswith(self.environment.topobjdir): return mozpath.join( '$(TOPOBJDIR)', mozpath.relpath(path, self.environment.topobjdir)) elif path.startswith(self.environment.topsrcdir): return mozpath.join( '$(TOPSRCDIR)', mozpath.relpath(path, self.environment.topsrcdir)) else: return path def _format_generated_file_input_name(self, path, obj): return self._pretty_path(path.full_path, obj) def _format_generated_file_output_name(self, path, obj): if not isinstance(path, Path): path = ObjDirPath(obj._context, '!' + path) return self._pretty_path(path.full_path, obj)
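# The l10n merge rules above are emitted through mozbuild's Makefile wrapper.
# The sketch below renders one merge rule as plain make text to show the
# intended shape of the output; the extra Python dependencies added above are
# omitted, and all paths are made up for illustration.
def l10n_merge_rule(merge, ref_file, l10n_file):
    return (
        '%s: %s %s\n'
        '\t$(PYTHON3) -m mozbuild.action.l10n_merge '
        '--output %s --ref-file %s --l10n-file %s\n'
        # Dummy rule so make does not fail when the l10n file is absent.
        '%s:\n' % (merge, ref_file, l10n_file,
                   merge, ref_file, l10n_file, l10n_file)
    )

print(l10n_merge_rule('l10n_merge/browser/foo.dtd',
                      'browser/locales/en-US/foo.dtd',
                      'fr/browser/foo.dtd'))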
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._seen_directories = set() self._defines = dict() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._has_xpidl = False def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if not isinstance(obj, Defines) and isinstance(obj, ContextDerived): defines = self._defines.get(obj.objdir, {}) if defines: defines = defines.defines if isinstance(obj, Defines): self._defines[obj.objdir] = obj # We're assuming below that Defines come first for a given objdir, # which is kind of set in stone from the order things are treated # in emitter.py. assert obj.objdir not in self._seen_directories elif isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj, defines) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \ obj.install_target.startswith('dist/bin'): for path, files in obj.files.walk(): for f in files: if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, f.full_path, path, defines=defines) else: self._install_manifests[ obj.install_target].add_symlink( f.full_path, mozpath.join(path, mozpath.basename(f))) elif isinstance(obj, ChromeManifestEntry) and \ obj.install_target.startswith('dist/bin'): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath( obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, XPIDLFile): self._has_xpidl = True # XPIDL are emitted before Defines, which breaks the assert in the # branch for Defines. OTOH, we don't actually care about the # XPIDLFile objects just yet, so we can just pretend we didn't see # an object in the directory yet. return True else: # We currently ignore a lot of object types, so just acknowledge # everything. return True self._seen_directories.add(obj.objdir) return True def _consume_jar_manifest(self, obj, defines): # Ideally, this would all be handled somehow in the emitter, but # this would require all the magic surrounding l10n and addons in # the recursive make backend to die, which is not going to happen # any time soon enough. # Notably missing: # - DEFINES from config/config.mk # - L10n support # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in # moz.build, but it doesn't matter in dist/bin. 
pp = Preprocessor() pp.context.update(defines) pp.context.update(self.environment.defines) pp.context.update( AB_CD='en-US', BUILD_FASTER=1, ) pp.out = JarManifestParser() pp.do_include(obj.path) self.backend_input_files |= pp.includes for jarinfo in pp.out: install_target = obj.install_target if jarinfo.base: install_target = mozpath.normpath( mozpath.join(install_target, jarinfo.base)) for e in jarinfo.entries: if e.is_locale: if jarinfo.relativesrcdir: path = mozpath.join(self.environment.topsrcdir, jarinfo.relativesrcdir) else: path = mozpath.dirname(obj.path) src = mozpath.join(path, 'en-US', e.source) elif e.source.startswith('/'): src = mozpath.join(self.environment.topsrcdir, e.source[1:]) else: src = mozpath.join(mozpath.dirname(obj.path), e.source) if '*' in e.source: if e.preprocess: raise Exception('%s: Wildcards are not supported with ' 'preprocessing' % obj.path) def _prefix(s): for p in s.split('/'): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(src)) self._install_manifests[install_target] \ .add_pattern_symlink( prefix, src[len(prefix):], mozpath.join(jarinfo.name, e.output)) continue if not os.path.exists(src): if e.is_locale: raise Exception('%s: Cannot find %s' % (obj.path, e.source)) if e.source.startswith('/'): src = mozpath.join(self.environment.topobjdir, e.source[1:]) else: # This actually gets awkward if the jar.mn is not # in the same directory as the moz.build declaring # it, but it's how it works in the recursive make, # not that anything relies on that, but it's simpler. src = mozpath.join(obj.objdir, e.source) self._dependencies['install-%s' % install_target] \ .append(mozpath.relpath( src, self.environment.topobjdir)) if e.preprocess: kwargs = {} if src.endswith('.css'): kwargs['marker'] = '%' self._add_preprocess(obj, src, mozpath.join( jarinfo.name, mozpath.dirname(e.output)), mozpath.basename(e.output), defines=defines, **kwargs) else: self._install_manifests[install_target].add_symlink( src, mozpath.join(jarinfo.name, e.output)) manifest = mozpath.normpath( mozpath.join(install_target, jarinfo.name)) manifest += '.manifest' for m in jarinfo.chrome_manifests: self._manifest_entries[manifest].add( m.replace('%', mozpath.basename(jarinfo.name) + '/')) if jarinfo.name != 'chrome': manifest = mozpath.normpath( mozpath.join(install_target, 'chrome.manifest')) entry = 'manifest %s.manifest' % jarinfo.name self._manifest_entries[manifest].add(entry) def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) mk.add_statement('BACKEND = %s' % self._backend_output_list_file) if not self._has_xpidl: mk.add_statement('NO_XPIDL = 1') # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): mk.add_statement('%s = %s' % (var, self.environment.substs[var])) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( ''.join('%s\n' % e for e in sorted(entries)), mozpath.relpath(target, install_target)) # Add information for install manifests. 
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we inferred:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        # Add backend dependencies:
        mk.create_rule([self._backend_output_list_file]
                       ).add_dependencies(self.backend_input_files)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
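# A self-contained sketch (hypothetical helper, not backend API) of the two
# string rewrites in _consume_jar_manifest above: '%' in a chrome manifest
# line is replaced by the jar name plus '/', and any jar other than 'chrome'
# is registered from the top-level chrome.manifest.
import posixpath

def chrome_entries(install_target, jar_name, chrome_manifests):
    entries = {}
    manifest = posixpath.join(install_target, jar_name) + '.manifest'
    entries[manifest] = {m.replace('%', posixpath.basename(jar_name) + '/')
                         for m in chrome_manifests}
    if jar_name != 'chrome':
        top = posixpath.join(install_target, 'chrome.manifest')
        entries[top] = {'manifest %s.manifest' % jar_name}
    return entries

e = chrome_entries('dist/bin', 'browser',
                   ['content browser %content/browser/'])
assert e['dist/bin/browser.manifest'] == \
    {'content browser browser/content/browser/'}
assert e['dist/bin/chrome.manifest'] == {'manifest browser.manifest'}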
class TupOnly(CommonBackend, PartialBackend): """Backend that generates Tupfiles for the tup build system. """ def _init(self): CommonBackend._init(self) self._backend_files = {} self._cmd = MozbuildObject.from_environment() self._manifest_entries = OrderedDefaultDict(set) self._compile_env_gen_files = ( '*.c', '*.cpp', '*.h', '*.inc', '*.py', '*.rs', ) # These are 'group' dependencies - All rules that list these as an output # will be built before any rules that list this as an input. self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>' self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>' def _get_backend_file(self, relobjdir): objdir = mozpath.normpath(mozpath.join(self.environment.topobjdir, relobjdir)) if objdir not in self._backend_files: self._backend_files[objdir] = \ BackendTupfile(objdir, self.environment, self.environment.topsrcdir, self.environment.topobjdir) return self._backend_files[objdir] def _get_backend_file_for(self, obj): return self._get_backend_file(obj.relobjdir) def _py_action(self, action): cmd = [ '$(PYTHON)', '-m', 'mozbuild.action.%s' % action, ] return cmd def consume_object(self, obj): """Write out build files necessary to build with tup.""" if not isinstance(obj, ContextDerived): return False consumed = CommonBackend.consume_object(self, obj) if consumed: return True backend_file = self._get_backend_file_for(obj) if isinstance(obj, GeneratedFile): # These files are already generated by make before tup runs. skip_files = ( 'buildid.h', 'source-repo.h', ) if self.environment.is_artifact_build: skip_files = skip_files + self._compile_env_gen_files for f in obj.outputs: if any(mozpath.match(f, p) for p in skip_files): return False if 'application.ini.h' in obj.outputs: # application.ini.h is a special case since we need to process # the FINAL_TARGET_PP_FILES for application.ini before running # the GENERATED_FILES script, and tup doesn't handle the rules # out of order. backend_file.delayed_generated_files.append(obj) else: self._process_generated_file(backend_file, obj) elif (isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith('dist/bin')): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath(obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, Defines): self._process_defines(backend_file, obj) elif isinstance(obj, HostDefines): self._process_defines(backend_file, obj, host=True) elif isinstance(obj, FinalTargetFiles): self._process_final_target_files(obj) elif isinstance(obj, FinalTargetPreprocessedFiles): self._process_final_target_pp_files(obj, backend_file) elif isinstance(obj, JARManifest): self._consume_jar_manifest(obj) elif isinstance(obj, PerSourceFlag): backend_file.per_source_flags[obj.file_name].extend(obj.flags) elif isinstance(obj, ComputedFlags): self._process_computed_flags(obj, backend_file) elif isinstance(obj, (Sources, GeneratedSources)): backend_file.sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, HostSources): backend_file.host_sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, VariablePassthru): backend_file.variables = obj.variables return True def consume_finished(self): CommonBackend.consume_finished(self) # The approach here is similar to fastermake.py, but we # simply write out the resulting files here. 
for target, entries in self._manifest_entries.iteritems(): with self._write_file(mozpath.join(self.environment.topobjdir, target)) as fh: fh.write(''.join('%s\n' % e for e in sorted(entries))) for objdir, backend_file in sorted(self._backend_files.items()): for obj in backend_file.delayed_generated_files: self._process_generated_file(backend_file, obj) backend_file.gen_sources_rules([self._installed_files]) with self._write_file(fh=backend_file): pass with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh: acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value)) for (name, value) in sorted(self.environment.acdefines.iteritems())]) # TODO: AB_CD only exists in Makefiles at the moment. acdefines_flags += ' -DAB_CD=en-US' # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and is # only defined in browser/locales/Makefile.in acdefines_flags += ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % self.environment.topsrcdir # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in # toolkit/content/buildconfig.html acdefines_flags += ' -DBUILD_FASTER=1' fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n') fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n') fh.write('ACDEFINES = %s\n' % acdefines_flags) fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % ( os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir) )) fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n') fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n') fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n') fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n') fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n') # Run 'tup init' if necessary. if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init']) def _process_generated_file(self, backend_file, obj): # TODO: These are directories that don't work in the tup backend # yet, because things they depend on aren't built yet. skip_directories = ( 'layout/style/test', # HostSimplePrograms 'toolkit/library', # libxul.so ) if obj.script and obj.method and obj.relobjdir not in skip_directories: backend_file.export_shell() cmd = self._py_action('file_generate') if obj.localized: cmd.append('--locale=en-US') cmd.extend([ obj.script, obj.method, obj.outputs[0], '%s.pp' % obj.outputs[0], # deps file required ]) full_inputs = [f.full_path for f in obj.inputs] cmd.extend(full_inputs) cmd.extend(shell_quote(f) for f in obj.flags) outputs = [] outputs.extend(obj.outputs) outputs.append('%s.pp' % obj.outputs[0]) extra_outputs = [self._installed_files] if obj.required_for_compile else None backend_file.rule( display='python {script}:{method} -> [%o]'.format(script=obj.script, method=obj.method), cmd=cmd, inputs=full_inputs, outputs=outputs, extra_outputs=extra_outputs, ) def _process_defines(self, backend_file, obj, host=False): defines = list(obj.get_defines()) if defines: if host: backend_file.host_defines = defines else: backend_file.defines = defines def _process_final_target_files(self, obj): target = obj.install_target if not isinstance(obj, ObjdirFiles): path = mozpath.basedir(target, ( 'dist/bin', 'dist/xpi-stage', '_tests', 'dist/include', 'dist/sdk', )) if not path: raise Exception("Cannot install to " + target) if target.startswith('_tests'): # TODO: TEST_HARNESS_FILES present a few challenges for the tup # backend (bug 1372381). 
return for path, files in obj.files.walk(): for f in files: if not isinstance(f, ObjDirPath): backend_file = self._get_backend_file(mozpath.join(target, path)) if '*' in f: if f.startswith('/') or isinstance(f, AbsolutePath): basepath, wild = os.path.split(f.full_path) if '*' in basepath: raise Exception("Wildcards are only supported in the filename part of " "srcdir-relative or absolute paths.") # TODO: This is only needed for Windows, so we can # skip this for now. pass else: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) self.backend_input_files.add(prefix) finder = FileFinder(prefix) for p, _ in finder.find(f.full_path[len(prefix):]): backend_file.symlink_rule(mozpath.join(prefix, p), output=mozpath.join(f.target_basename, p), output_group=self._installed_files) else: backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=self._installed_files) else: if (self.environment.is_artifact_build and any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)): # If we have an artifact build we never would have generated this file, # so do not attempt to install it. continue # We're not generating files in these directories yet, so # don't attempt to install files generated from them. if f.context.relobjdir not in ('layout/style/test', 'toolkit/library'): output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path, f.target_basename) gen_backend_file = self._get_backend_file(f.context.relobjdir) gen_backend_file.symlink_rule(f.full_path, output=output, output_group=self._installed_files) def _process_final_target_pp_files(self, obj, backend_file): for i, (path, files) in enumerate(obj.files.walk()): for f in files: self._preprocess(backend_file, f.full_path, destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path)) def _process_computed_flags(self, obj, backend_file): for var, flags in obj.get_flags(): backend_file.local_flags[var] = flags def _process_unified_sources(self, obj): backend_file = self._get_backend_file_for(obj) files = [f[0] for f in obj.unified_source_mapping] backend_file.sources[obj.canonical_suffix].extend(files) def _handle_idl_manager(self, manager): if self.environment.is_artifact_build: return dist_idl_backend_file = self._get_backend_file('dist/idl') for idl in manager.idls.values(): dist_idl_backend_file.symlink_rule(idl['source'], output_group=self._installed_idls) backend_file = self._get_backend_file('xpcom/xpidl') backend_file.export_shell() for module, data in sorted(manager.modules.iteritems()): dest, idls = data cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '-I$(IDL_PARSER_DIR)', '-I$(IDL_PARSER_CACHE_DIR)', '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py', '--cache-dir', '$(IDL_PARSER_CACHE_DIR)', '$(DIST)/idl', '$(DIST)/include', '$(DIST)/xpcrs', '$(MOZ_OBJ_ROOT)/%s/components' % dest, module, ] cmd.extend(sorted(idls)) outputs = ['$(MOZ_OBJ_ROOT)/%s/components/%s.xpt' % (dest, module)] outputs.extend(['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in sorted(idls)]) outputs.extend(['$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in sorted(idls)]) outputs.extend(['$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in sorted(idls)]) backend_file.rule( inputs=[ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py', '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py', self._installed_idls, ], display='XPIDL %s' % module, cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], ) for manifest, entries in 
manager.interface_manifests.items(): for xpt in entries: self._manifest_entries[manifest].add('interfaces %s' % xpt) for m in manager.chrome_manifests: self._manifest_entries[m].add('manifest components/interfaces.manifest') def _preprocess(self, backend_file, input_file, destdir=None): # .css files use '%' as the preprocessor marker, which must be scaped as # '%%' in the Tupfile. marker = '%%' if input_file.endswith('.css') else '#' cmd = self._py_action('preprocessor') cmd.extend([shell_quote(d) for d in backend_file.defines]) cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker]) base_input = mozpath.basename(input_file) if base_input.endswith('.in'): base_input = mozpath.splitext(base_input)[0] output = mozpath.join(destdir, base_input) if destdir else base_input backend_file.rule( inputs=[input_file], display='Preprocess %o', cmd=cmd, outputs=[output], ) def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources, sorted_nonstatic_ipdl_sources, sorted_static_ipdl_sources, unified_ipdl_cppsrcs_mapping): # Preferably we wouldn't have to import ipdl, but we need to parse the # ast in order to determine the namespaces since they are used in the # header output paths. sys.path.append(mozpath.join(self.environment.topsrcdir, 'ipc', 'ipdl')) import ipdl backend_file = self._get_backend_file('ipc/ipdl') outheaderdir = '_ipdlheaders' srcdir = mozpath.join(self.environment.topsrcdir, 'ipc/ipdl') cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '%s/ipdl.py' % srcdir, '--sync-msg-list=%s/sync-messages.ini' % srcdir, '--msg-metadata=%s/message-metadata.ini' % srcdir, '--outheaders-dir=%s' % outheaderdir, '--outcpp-dir=.', ] ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources)) cmd.extend(['-I%s' % d for d in ipdldirs]) cmd.extend(sorted_ipdl_sources) outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py'] for filename in sorted_ipdl_sources: filepath, ext = os.path.splitext(filename) dirname, basename = os.path.split(filepath) dirname = mozpath.relpath(dirname, self.environment.topsrcdir) extensions = [''] if ext == '.ipdl': extensions.extend(['Child', 'Parent']) with open(filename) as f: ast = ipdl.parse(f.read(), filename, includedirs=ipdldirs) self.backend_input_files.add(filename) headerdir = os.path.join(outheaderdir, *([ns.name for ns in ast.namespaces])) for extension in extensions: outputs.append("%s%s.cpp" % (basename, extension)) outputs.append(mozpath.join(headerdir, '%s%s.h' % (basename, extension))) backend_file.rule( display='IPDL code generation', cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_ipdl_cppsrcs_mapping) def _handle_webidl_build(self, bindings_dir, unified_source_mapping, webidls, expected_build_output_files, global_define_files): backend_file = self._get_backend_file('dom/bindings') backend_file.export_shell() for source in sorted(webidls.all_preprocessed_sources()): self._preprocess(backend_file, source) cmd = self._py_action('webidl') cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings')) # The WebIDLCodegenManager knows all of the .cpp and .h files that will # be created (expected_build_output_files), but there are a few # additional files that are also created by the webidl py_action. 
outputs = [ '_cache/webidlyacc.py', 'codegen.json', 'codegen.pp', 'parser.out', ] outputs.extend(expected_build_output_files) backend_file.rule( display='WebIDL code generation', cmd=cmd, inputs=webidls.all_non_static_basenames(), outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_source_mapping) backend_file.sources['.cpp'].extend(sorted(global_define_files)) test_backend_file = self._get_backend_file('dom/bindings/test') test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
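# Illustrative sketch (not part of the backend): roughly how a rule() call
# like the XPIDL one above could map onto a line in a generated Tupfile.
# Group names such as $(MOZ_OBJ_ROOT)/<installed-files> are listed as an
# extra output on producing rules and as a plain input on consuming rules,
# which is how tup orders IDL installation before XPIDL processing without
# moz.build spelling out every file-level dependency. The ':' rule syntax
# shown here is an approximation for illustration, not the literal output
# of BackendTupfile.
def _example_tup_rule(inputs, display, cmd, outputs, output_group=None):
    rule = ': %s |> ^ %s^ %s |> %s' % (
        ' '.join(inputs), display, ' '.join(cmd), ' '.join(outputs))
    if output_group:
        # e.g. '$(MOZ_OBJ_ROOT)/<installed-files>'
        rule += ' %s' % output_group
    return rule

# _example_tup_rule(['foo.idl'], 'XPIDL foo', ['xpidl-process', 'foo.idl'],
#                   ['foo.h'], '$(MOZ_OBJ_ROOT)/<installed-files>')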
class TupOnly(CommonBackend, PartialBackend): """Backend that generates Tupfiles for the tup build system. """ def _init(self): CommonBackend._init(self) self._backend_files = {} self._cmd = MozbuildObject.from_environment() self._manifest_entries = OrderedDefaultDict(set) # This is a 'group' dependency - All rules that list this as an output # will be built before any rules that list this as an input. self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>' def _get_backend_file(self, relativedir): objdir = mozpath.join(self.environment.topobjdir, relativedir) srcdir = mozpath.join(self.environment.topsrcdir, relativedir) if objdir not in self._backend_files: self._backend_files[objdir] = \ BackendTupfile(srcdir, objdir, self.environment, self.environment.topsrcdir, self.environment.topobjdir) return self._backend_files[objdir] def _get_backend_file_for(self, obj): return self._get_backend_file(obj.relativedir) def _py_action(self, action): cmd = [ '$(PYTHON)', '-m', 'mozbuild.action.%s' % action, ] return cmd def consume_object(self, obj): """Write out build files necessary to build with tup.""" if not isinstance(obj, ContextDerived): return False consumed = CommonBackend.consume_object(self, obj) if consumed: return True backend_file = self._get_backend_file_for(obj) if isinstance(obj, GeneratedFile): # These files are already generated by make before tup runs. skip_files = ( 'buildid.h', 'source-repo.h', ) if any(f in skip_files for f in obj.outputs): # Let the RecursiveMake backend handle these. return False if 'application.ini.h' in obj.outputs: # application.ini.h is a special case since we need to process # the FINAL_TARGET_PP_FILES for application.ini before running # the GENERATED_FILES script, and tup doesn't handle the rules # out of order. backend_file.delayed_generated_files.append(obj) else: self._process_generated_file(backend_file, obj) elif (isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith('dist/bin')): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath( obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, Defines): self._process_defines(backend_file, obj) elif isinstance(obj, HostDefines): self._process_defines(backend_file, obj, host=True) elif isinstance(obj, FinalTargetFiles): self._process_final_target_files(obj) elif isinstance(obj, FinalTargetPreprocessedFiles): self._process_final_target_pp_files(obj, backend_file) elif isinstance(obj, JARManifest): self._consume_jar_manifest(obj) return True def consume_finished(self): CommonBackend.consume_finished(self) # The approach here is similar to fastermake.py, but we # simply write out the resulting files here. 
for target, entries in self._manifest_entries.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, target)) as fh: fh.write(''.join('%s\n' % e for e in sorted(entries))) for objdir, backend_file in sorted(self._backend_files.items()): for obj in backend_file.delayed_generated_files: self._process_generated_file(backend_file, obj) with self._write_file(fh=backend_file): pass with self._write_file( mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh: acdefines = [ name for name in self.environment.defines if not name in self.environment.non_global_defines ] acdefines_flags = ' '.join([ '-D%s=%s' % (name, shell_quote(self.environment.defines[name])) for name in sorted(acdefines) ]) # TODO: AB_CD only exists in Makefiles at the moment. acdefines_flags += ' -DAB_CD=en-US' # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and is # only defined in browser/locales/Makefile.in acdefines_flags += ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % self.environment.topsrcdir # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in # toolkit/content/buildconfig.html acdefines_flags += ' -DBUILD_FASTER=1' fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n') fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n') fh.write('ACDEFINES = %s\n' % acdefines_flags) fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (os.path.relpath( self.environment.topsrcdir, self.environment.topobjdir))) fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n') fh.write( 'PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n') fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n') fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n') fh.write( 'IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n' ) # Run 'tup init' if necessary. if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init']) def _process_generated_file(self, backend_file, obj): # TODO: These are directories that don't work in the tup backend # yet, because things they depend on aren't built yet. 
skip_directories = (
            'layout/style/test',  # HostSimplePrograms
            'toolkit/library',    # libxul.so
        )
        if obj.script and obj.method and obj.relobjdir not in skip_directories:
            backend_file.export_shell()
            cmd = self._py_action('file_generate')
            cmd.extend([
                obj.script,
                obj.method,
                obj.outputs[0],
                '%s.pp' % obj.outputs[0],  # deps file required
            ])
            full_inputs = [f.full_path for f in obj.inputs]
            cmd.extend(full_inputs)
            cmd.extend(shell_quote(f) for f in obj.flags)

            outputs = []
            outputs.extend(obj.outputs)
            outputs.append('%s.pp' % obj.outputs[0])

            backend_file.rule(
                display='python {script}:{method} -> [%o]'.format(
                    script=obj.script, method=obj.method),
                cmd=cmd,
                inputs=full_inputs,
                outputs=outputs,
            )

    def _process_defines(self, backend_file, obj, host=False):
        defines = list(obj.get_defines())
        if defines:
            if host:
                backend_file.host_defines = defines
            else:
                backend_file.defines = defines

    def _process_final_target_files(self, obj):
        target = obj.install_target
        if not isinstance(obj, ObjdirFiles):
            path = mozpath.basedir(target, (
                'dist/bin',
                'dist/xpi-stage',
                '_tests',
                'dist/include',
                'dist/branding',
                'dist/sdk',
            ))
            if not path:
                raise Exception("Cannot install to " + target)

        for path, files in obj.files.walk():
            backend_file = self._get_backend_file(mozpath.join(target, path))
            for f in files:
                if not isinstance(f, ObjDirPath):
                    if '*' in f:
                        if f.startswith('/') or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if '*' in basepath:
                                raise Exception(
                                    "Wildcards are only supported in the filename part of "
                                    "srcdir-relative or absolute paths.")
                            # TODO: This is only needed for Windows, so we can
                            # skip this for now.
                            pass
                        else:
                            # TODO: This is needed for tests
                            pass
                    else:
                        backend_file.symlink_rule(
                            f.full_path,
                            output=f.target_basename,
                            output_group=self._installed_files)
                else:
                    # TODO: Support installing generated files
                    pass

    def _process_final_target_pp_files(self, obj, backend_file):
        for i, (path, files) in enumerate(obj.files.walk()):
            for f in files:
                self._preprocess(backend_file, f.full_path,
                                 destdir=mozpath.join(
                                     self.environment.topobjdir,
                                     obj.install_target, path))

    def _handle_idl_manager(self, manager):
        dist_idl_backend_file = self._get_backend_file('dist/idl')
        for idl in manager.idls.values():
            dist_idl_backend_file.symlink_rule(
                idl['source'], output_group=self._installed_files)

        backend_file = self._get_backend_file('xpcom/xpidl')
        backend_file.export_shell()

        for module, data in sorted(manager.modules.iteritems()):
            dest, idls = data
            cmd = [
                '$(PYTHON_PATH)',
                '$(PLY_INCLUDE)',
                '-I$(IDL_PARSER_DIR)',
                '-I$(IDL_PARSER_CACHE_DIR)',
                '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py',
                '--cache-dir', '$(IDL_PARSER_CACHE_DIR)',
                '$(DIST)/idl',
                '$(DIST)/include',
                '$(MOZ_OBJ_ROOT)/%s/components' % dest,
                module,
            ]
            cmd.extend(sorted(idls))

            outputs = ['$(MOZ_OBJ_ROOT)/%s/components/%s.xpt' % (dest, module)]
            outputs.extend([
                '$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in sorted(idls)
            ])
            backend_file.rule(
                inputs=[
                    '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py',
                    '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py',
                    self._installed_files,
                ],
                display='XPIDL %s' % module,
                cmd=cmd,
                outputs=outputs,
            )

        for manifest, entries in manager.interface_manifests.items():
            for xpt in entries:
                self._manifest_entries[manifest].add('interfaces %s' % xpt)

        for m in manager.chrome_manifests:
            self._manifest_entries[m].add(
                'manifest components/interfaces.manifest')

    def _preprocess(self, backend_file, input_file, destdir=None):
        # .css files use '%' as the preprocessor marker, which must be escaped
        # as '%%' in the Tupfile.
        marker = '%%' if input_file.endswith('.css') else '#'

        cmd = self._py_action('preprocessor')
        cmd.extend([shell_quote(d) for d in backend_file.defines])
        cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker])

        base_input = mozpath.basename(input_file)
        if base_input.endswith('.in'):
            base_input = mozpath.splitext(base_input)[0]
        output = mozpath.join(destdir, base_input) if destdir else base_input

        backend_file.rule(
            inputs=[input_file],
            display='Preprocess %o',
            cmd=cmd,
            outputs=[output],
        )

    def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
                             unified_ipdl_cppsrcs_mapping):
        # TODO: This isn't implemented yet in the tup backend, but it is called
        # by the CommonBackend.
        pass

    def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
                             webidls, expected_build_output_files,
                             global_define_files):
        backend_file = self._get_backend_file('dom/bindings')
        backend_file.export_shell()

        for source in sorted(webidls.all_preprocessed_sources()):
            self._preprocess(backend_file, source)

        cmd = self._py_action('webidl')
        cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings'))

        # The WebIDLCodegenManager knows all of the .cpp and .h files that will
        # be created (expected_build_output_files), but there are a few
        # additional files that are also created by the webidl py_action.
        outputs = [
            '_cache/webidlyacc.py',
            'codegen.json',
            'codegen.pp',
            'parser.out',
        ]
        outputs.extend(expected_build_output_files)

        backend_file.rule(
            display='WebIDL code generation',
            cmd=cmd,
            inputs=webidls.all_non_static_basenames(),
            outputs=outputs,
            check_unchanged=True,
        )
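# Illustrative sketch (values are made up): the approximate shape of the
# Tuprules.tup that consume_finished() above writes into the objdir. Every
# generated Tupfile includes these variables, so individual rules can refer
# to $(PYTHON_PATH), $(ACDEFINES), $(DIST) and friends without each backend
# file re-deriving them.
_EXAMPLE_TUPRULES_TUP = '''\
MOZ_OBJ_ROOT = $(TUP_CWD)
DIST = $(MOZ_OBJ_ROOT)/dist
ACDEFINES = -DAB_CD=en-US -DBUILD_FASTER=1
topsrcdir = $(MOZ_OBJ_ROOT)/../mozilla-unified
PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B
PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py
PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply
'''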
class TreeMetadataEmitter(LoggingMixin): """Converts the executed mozbuild files into data structures. This is a bridge between reader.py and data.py. It takes what was read by reader.BuildReader and converts it into the classes defined in the data module. """ def __init__(self, config): self.populate_logger() self.config = config mozinfo.find_and_update_from_json(config.topobjdir) # Python 2.6 doesn't allow unicode keys to be used for keyword # arguments. This gross hack works around the problem until we # rid ourselves of 2.6. self.info = {} for k, v in mozinfo.info.items(): if isinstance(k, unicode): k = k.encode('ascii') self.info[k] = v self._libs = OrderedDefaultDict(list) self._binaries = OrderedDict() self._linkage = [] self._static_linking_shared = set() # Keep track of external paths (third party build systems), starting # from what we run a subconfigure in. We'll eliminate some directories # as we traverse them with moz.build (e.g. js/src). subconfigures = os.path.join(self.config.topobjdir, 'subconfigures') paths = [] if os.path.exists(subconfigures): paths = open(subconfigures).read().splitlines() self._external_paths = set(mozpath.normsep(d) for d in paths) # Add security/nss manually, since it doesn't have a subconfigure. self._external_paths.add('security/nss') def emit(self, output): """Convert the BuildReader output into data structures. The return value from BuildReader.read_topsrcdir() (a generator) is typically fed into this function. """ file_count = 0 sandbox_execution_time = 0.0 emitter_time = 0.0 contexts = {} def emit_objs(objs): for o in objs: yield o if not o._ack: raise Exception('Unhandled object of type %s' % type(o)) for out in output: if isinstance(out, Context): # Keep all contexts around, we will need them later. contexts[out.objdir] = out start = time.time() # We need to expand the generator for the timings to work. objs = list(self.emit_from_context(out)) emitter_time += time.time() - start for o in emit_objs(objs): yield o # Update the stats. file_count += len(out.all_paths) sandbox_execution_time += out.execution_time else: raise Exception('Unhandled output type: %s' % type(out)) start = time.time() objs = list(self._emit_libs_derived(contexts)) emitter_time += time.time() - start for o in emit_objs(objs): yield o yield ReaderSummary(file_count, sandbox_execution_time, emitter_time) def _emit_libs_derived(self, contexts): # First do FINAL_LIBRARY linkage. for lib in (l for libs in self._libs.values() for l in libs): if not isinstance(lib, StaticLibrary) or not lib.link_into: continue if lib.link_into not in self._libs: raise SandboxValidationError( 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME' % lib.link_into, contexts[lib.objdir]) candidates = self._libs[lib.link_into] # When there are multiple candidates, but all are in the same # directory and have a different type, we want all of them to # have the library linked. The typical usecase is when building # both a static and a shared library in a directory, and having # that as a FINAL_LIBRARY. if len(set(type(l) for l in candidates)) == len(candidates) and \ len(set(l.objdir for l in candidates)) == 1: for c in candidates: c.link_library(lib) else: raise SandboxValidationError( 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in ' 'multiple places:\n %s' % (lib.link_into, '\n '.join(l.objdir for l in candidates)), contexts[lib.objdir]) # Next, USE_LIBS linkage. 
for context, obj, variable in self._linkage:
            self._link_libraries(context, obj, variable)

        def recurse_refs(lib):
            for o in lib.refs:
                yield o
                if isinstance(o, StaticLibrary):
                    for q in recurse_refs(o):
                        yield q

        # Check that all static libraries referencing shared libraries in
        # USE_LIBS are linked into a shared library or program.
        for lib in self._static_linking_shared:
            if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
                shared_libs = sorted(l.basename for l in lib.linked_libraries
                                     if isinstance(l, SharedLibrary))
                raise SandboxValidationError(
                    'The static "%s" library is not used in a shared library '
                    'or a program, but USE_LIBS contains the following shared '
                    'library names:\n    %s\n\nMaybe you can remove the '
                    'static "%s" library?' % (lib.basename,
                    '\n    '.join(shared_libs), lib.basename),
                    contexts[lib.objdir])

        # Propagate LIBRARY_DEFINES to all child libraries recursively.
        def propagate_defines(outerlib, defines):
            outerlib.defines.update(defines)
            for lib in outerlib.linked_libraries:
                # Propagate defines only along FINAL_LIBRARY paths, not
                # USE_LIBS paths.
                if (isinstance(lib, StaticLibrary) and
                        lib.link_into == outerlib.basename):
                    propagate_defines(lib, defines)

        for lib in (l for libs in self._libs.values() for l in libs):
            if isinstance(lib, Library):
                propagate_defines(lib, lib.defines)
            yield lib

        for obj in self._binaries.values():
            yield obj

    LIBRARY_NAME_VAR = {
        'host': 'HOST_LIBRARY_NAME',
        'target': 'LIBRARY_NAME',
    }

    def _link_libraries(self, context, obj, variable):
        """Add linkage declarations to a given object."""
        assert isinstance(obj, Linkable)

        for path in context.get(variable, []):
            force_static = path.startswith('static:') and obj.KIND == 'target'
            if force_static:
                path = path[7:]
            name = mozpath.basename(path)
            dir = mozpath.dirname(path)
            candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
            if dir:
                if dir.startswith('/'):
                    dir = mozpath.normpath(mozpath.join(
                        obj.topobjdir, dir[1:]))
                else:
                    dir = mozpath.normpath(mozpath.join(obj.objdir, dir))
                dir = mozpath.relpath(dir, obj.topobjdir)
                candidates = [l for l in candidates if l.relobjdir == dir]
                if not candidates:
                    # If the given directory is under one of the external
                    # (third party) paths, use a fake library reference to
                    # there.
                    for d in self._external_paths:
                        if dir.startswith('%s/' % d):
                            candidates = [
                                self._get_external_library(
                                    dir, name, force_static)
                            ]
                            break
                if not candidates:
                    raise SandboxValidationError(
                        '%s contains "%s", but there is no "%s" %s in %s.'
                        % (variable, path, name,
                           self.LIBRARY_NAME_VAR[obj.KIND], dir), context)

            if len(candidates) > 1:
                # If there's more than one remaining candidate, it could be
                # that there are instances for the same library, in static and
                # shared form.
                libs = {}
                for l in candidates:
                    key = mozpath.join(l.relobjdir, l.basename)
                    if force_static:
                        if isinstance(l, StaticLibrary):
                            libs[key] = l
                    else:
                        if key in libs and isinstance(l, SharedLibrary):
                            libs[key] = l
                        if key not in libs:
                            libs[key] = l
                candidates = libs.values()
                if force_static and not candidates:
                    if dir:
                        raise SandboxValidationError(
                            '%s contains "static:%s", but there is no static '
                            '"%s" %s in %s.' % (variable, path, name,
                            self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is no static "%s" '
                        '%s in the tree' % (variable, name, name,
                        self.LIBRARY_NAME_VAR[obj.KIND]), context)

            if not candidates:
                raise SandboxValidationError(
                    '%s contains "%s", which does not match any %s in the tree.'
% (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]), context) elif len(candidates) > 1: paths = (mozpath.join(l.relativedir, 'moz.build') for l in candidates) raise SandboxValidationError( '%s contains "%s", which matches a %s defined in multiple ' 'places:\n %s' % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND], '\n '.join(paths)), context) elif force_static and not isinstance(candidates[0], StaticLibrary): raise SandboxValidationError( '%s contains "static:%s", but there is only a shared "%s" ' 'in %s. You may want to add FORCE_STATIC_LIB=True in ' '%s/moz.build, or remove "static:".' % (variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir), context) elif isinstance(obj, StaticLibrary) and isinstance( candidates[0], SharedLibrary): self._static_linking_shared.add(obj) obj.link_library(candidates[0]) # Link system libraries from OS_LIBS/HOST_OS_LIBS. for lib in context.get(variable.replace('USE', 'OS'), []): obj.link_system_library(lib) @memoize def _get_external_library(self, dir, name, force_static): # Create ExternalStaticLibrary or ExternalSharedLibrary object with a # context more or less truthful about where the external library is. context = Context(config=self.config) context.add_source(mozpath.join(self.config.topsrcdir, dir, 'dummy')) if force_static: return ExternalStaticLibrary(context, name) else: return ExternalSharedLibrary(context, name) def emit_from_context(self, context): """Convert a Context to tree metadata objects. This is a generator of mozbuild.frontend.data.ContextDerived instances. """ # We only want to emit an InstallationTarget if one of the consulted # variables is defined. Later on, we look up FINAL_TARGET, which has # the side-effect of populating it. So, we need to do this lookup # early. if any(k in context for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')): yield InstallationTarget(context) # We always emit a directory traversal descriptor. This is needed by # the recursive make backend. for o in self._emit_directory_traversal_from_context(context): yield o for path in context['CONFIGURE_SUBST_FILES']: yield self._create_substitution(ConfigFileSubstitution, context, path) for path in context['CONFIGURE_DEFINE_FILES']: yield self._create_substitution(HeaderFileSubstitution, context, path) # XPIDL source files get processed and turned into .h and .xpt files. # If there are multiple XPIDL files in a directory, they get linked # together into a final .xpt, which has the name defined by # XPIDL_MODULE. xpidl_module = context['XPIDL_MODULE'] if context['XPIDL_SOURCES'] and not xpidl_module: raise SandboxValidationError( 'XPIDL_MODULE must be defined if ' 'XPIDL_SOURCES is defined.', context) if xpidl_module and not context['XPIDL_SOURCES']: raise SandboxValidationError( 'XPIDL_MODULE cannot be defined ' 'unless there are XPIDL_SOURCES', context) if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']: self.log( logging.WARN, 'mozbuild_warning', dict(path=context.main_path), '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.') for idl in context['XPIDL_SOURCES']: yield XPIDLFile(context, mozpath.join(context.srcdir, idl), xpidl_module) for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'): for src in (context[symbol] or []): if not os.path.exists(mozpath.join(context.srcdir, src)): raise SandboxValidationError( 'File listed in %s does not ' 'exist: \'%s\'' % (symbol, src), context) # Proxy some variables as-is until we have richer classes to represent # them. 
We should aim to keep this set small because it violates the # desired abstraction of the build definition away from makefiles. passthru = VariablePassthru(context) varlist = [ 'ANDROID_GENERATED_RESFILES', 'ANDROID_RES_DIRS', 'DISABLE_STL_WRAPPING', 'EXTRA_ASSEMBLER_FLAGS', 'EXTRA_COMPILE_FLAGS', 'EXTRA_COMPONENTS', 'EXTRA_DSO_LDOPTS', 'EXTRA_PP_COMPONENTS', 'FAIL_ON_WARNINGS', 'USE_STATIC_LIBS', 'IS_GYP_DIR', 'MSVC_ENABLE_PGO', 'NO_DIST_INSTALL', 'PYTHON_UNIT_TESTS', 'RCFILE', 'RESFILE', 'RCINCLUDE', 'DEFFILE', 'WIN32_EXE_LDFLAGS', 'LD_VERSION_SCRIPT', ] for v in varlist: if v in context and context[v]: passthru.variables[v] = context[v] if context.config.substs.get('OS_TARGET') == 'WINNT' and \ context['DELAYLOAD_DLLS']: context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll) for dll in context['DELAYLOAD_DLLS']]) context['OS_LIBS'].append('delayimp') for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']: if v in context and context[v]: passthru.variables['MOZBUILD_' + v] = context[v] # NO_VISIBILITY_FLAGS is slightly different if context['NO_VISIBILITY_FLAGS']: passthru.variables['VISIBILITY_FLAGS'] = '' no_pgo = context.get('NO_PGO') sources = context.get('SOURCES', []) no_pgo_sources = [f for f in sources if sources[f].no_pgo] if no_pgo: if no_pgo_sources: raise SandboxValidationError( 'NO_PGO and SOURCES[...].no_pgo ' 'cannot be set at the same time', context) passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo if no_pgo_sources: passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources # A map from "canonical suffixes" for a particular source file # language to the range of suffixes associated with that language. # # We deliberately don't list the canonical suffix in the suffix list # in the definition; we'll add it in programmatically after defining # things. suffix_map = { '.s': set(['.asm']), '.c': set(), '.m': set(), '.mm': set(), '.cpp': set(['.cc', '.cxx']), '.S': set(), } # The inverse of the above, mapping suffixes to their canonical suffix. canonicalized_suffix_map = {} for suffix, alternatives in suffix_map.iteritems(): alternatives.add(suffix) for a in alternatives: canonicalized_suffix_map[a] = suffix def canonical_suffix_for_file(f): return canonicalized_suffix_map[mozpath.splitext(f)[1]] # A map from moz.build variables to the canonical suffixes of file # kinds that can be listed therein. all_suffixes = list(suffix_map.keys()) varmap = dict( SOURCES=(Sources, all_suffixes), HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']), UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']), GENERATED_SOURCES=(GeneratedSources, all_suffixes), ) for variable, (klass, suffixes) in varmap.items(): allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes]) # First ensure that we haven't been given filetypes that we don't # recognize. for f in context[variable]: ext = mozpath.splitext(f)[1] if ext not in allowed_suffixes: raise SandboxValidationError( '%s has an unknown file type.' % f, context) if variable.startswith('GENERATED_'): l = passthru.variables.setdefault('GARBAGE', []) l.append(f) # Now sort the files to let groupby work. 
sorted_files = sorted(context[variable], key=canonical_suffix_for_file) for canonical_suffix, files in itertools.groupby( sorted_files, canonical_suffix_for_file): arglist = [context, list(files), canonical_suffix] if variable.startswith( 'UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context: arglist.append(context['FILES_PER_UNIFIED_FILE']) yield klass(*arglist) sources_with_flags = [f for f in sources if sources[f].flags] for f in sources_with_flags: ext = mozpath.splitext(f)[1] yield PerSourceFlag(context, f, sources[f].flags) exports = context.get('EXPORTS') if exports: yield Exports( context, exports, dist_install=not context.get('NO_DIST_INSTALL', False)) generated_files = context.get('GENERATED_FILES') if generated_files: for f in generated_files: flags = generated_files[f] output = f if flags.script: script = mozpath.join(context.srcdir, flags.script) inputs = [ mozpath.join(context.srcdir, i) for i in flags.inputs ] if not os.path.exists(script): raise SandboxValidationError( 'Script for generating %s does not exist: %s' % (f, script), context) if os.path.splitext(script)[1] != '.py': raise SandboxValidationError( 'Script for generating %s does not end in .py: %s' % (f, script), context) for i in inputs: if not os.path.exists(i): raise SandboxValidationError( 'Input for generating %s does not exist: %s' % (f, i), context) else: script = None inputs = [] yield GeneratedFile(context, script, output, inputs) test_harness_files = context.get('TEST_HARNESS_FILES') if test_harness_files: srcdir_files = defaultdict(list) srcdir_pattern_files = defaultdict(list) objdir_files = defaultdict(list) for path, strings in test_harness_files.walk(): if not path and strings: raise SandboxValidationError( 'Cannot install files to the root of TEST_HARNESS_FILES', context) for s in strings: if context.is_objdir_path(s): if s.startswith('!/'): objdir_files[path].append('$(DEPTH)/%s' % s[2:]) else: objdir_files[path].append(s[1:]) else: resolved = context.resolve_path(s) if '*' in s: srcdir_pattern_files[path].append(s) elif not os.path.exists(resolved): raise SandboxValidationError( 'File listed in TEST_HARNESS_FILES does not exist: %s' % s, context) else: srcdir_files[path].append(resolved) yield TestHarnessFiles(context, srcdir_files, srcdir_pattern_files, objdir_files) defines = context.get('DEFINES') if defines: yield Defines(context, defines) resources = context.get('RESOURCE_FILES') if resources: yield Resources(context, resources, defines) for pref in sorted(context['JS_PREFERENCE_FILES']): yield JsPreferenceFile(context, pref) for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]: program = context.get(kind) if program: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" as %s name, ' 'because it is already used in %s' % (program, kind, self._binaries[program].relativedir), context) self._binaries[program] = cls(context, program) self._linkage.append((context, self._binaries[program], kind.replace('PROGRAM', 'USE_LIBS'))) for kind, cls in [('SIMPLE_PROGRAMS', SimpleProgram), ('CPP_UNIT_TESTS', SimpleProgram), ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]: for program in context[kind]: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" in %s, ' 'because it is already used in %s' % (program, kind, self._binaries[program].relativedir), context) self._binaries[program] = cls( context, program, is_unit_test=kind == 'CPP_UNIT_TESTS') self._linkage.append( (context, self._binaries[program], 'HOST_USE_LIBS' if kind == 
'HOST_SIMPLE_PROGRAMS' else 'USE_LIBS')) extra_js_modules = context.get('EXTRA_JS_MODULES') if extra_js_modules: yield JavaScriptModules(context, extra_js_modules, 'extra') extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES') if extra_pp_js_modules: yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp') test_js_modules = context.get('TESTING_JS_MODULES') if test_js_modules: yield JavaScriptModules(context, test_js_modules, 'testing') simple_lists = [ ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile), ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile), ('IPDL_SOURCES', IPDLFile), ('GENERATED_INCLUDES', GeneratedInclude), ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile), ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile), ('TEST_WEBIDL_FILES', TestWebIDLFile), ('WEBIDL_FILES', WebIDLFile), ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface), ] for context_var, klass in simple_lists: for name in context.get(context_var, []): yield klass(context, name) for local_include in context.get('LOCAL_INCLUDES', []): if local_include.startswith('/'): path = context.config.topsrcdir relative_include = local_include[1:] else: path = context.srcdir relative_include = local_include actual_include = os.path.join(path, relative_include) if not os.path.exists(actual_include): raise SandboxValidationError( 'Path specified in LOCAL_INCLUDES ' 'does not exist: %s (resolved to %s)' % (local_include, actual_include), context) yield LocalInclude(context, local_include) final_target_files = context.get('FINAL_TARGET_FILES') if final_target_files: yield FinalTargetFiles(context, final_target_files, context['FINAL_TARGET']) branding_files = context.get('BRANDING_FILES') if branding_files: yield BrandingFiles(context, branding_files) host_libname = context.get('HOST_LIBRARY_NAME') libname = context.get('LIBRARY_NAME') if host_libname: if host_libname == libname: raise SandboxValidationError( 'LIBRARY_NAME and ' 'HOST_LIBRARY_NAME must have a different value', context) lib = HostLibrary(context, host_libname) self._libs[host_libname].append(lib) self._linkage.append((context, lib, 'HOST_USE_LIBS')) final_lib = context.get('FINAL_LIBRARY') if not libname and final_lib: # If no LIBRARY_NAME is given, create one. libname = context.relsrcdir.replace('/', '_') static_lib = context.get('FORCE_STATIC_LIB') shared_lib = context.get('FORCE_SHARED_LIB') static_name = context.get('STATIC_LIBRARY_NAME') shared_name = context.get('SHARED_LIBRARY_NAME') is_framework = context.get('IS_FRAMEWORK') is_component = context.get('IS_COMPONENT') soname = context.get('SONAME') lib_defines = context.get('LIBRARY_DEFINES') shared_args = {} static_args = {} if final_lib: if static_lib: raise SandboxValidationError( 'FINAL_LIBRARY implies FORCE_STATIC_LIB. ' 'Please remove the latter.', context) if shared_lib: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. ' 'Please remove one.', context) if is_framework: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_FRAMEWORK. ' 'Please remove one.', context) if is_component: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_COMPONENT. ' 'Please remove one.', context) static_args['link_into'] = final_lib static_lib = True if libname: if is_component: if static_lib: raise SandboxValidationError( 'IS_COMPONENT conflicts with FORCE_STATIC_LIB. 
' 'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if not static_lib and not shared_lib:
                static_lib = True

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
                        context)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
                        context)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', context)
                shared_args['soname'] = soname

            # If both a shared and a static library are created, only the
            # shared library is meant to be an SDK library.
            if context.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but neither STATIC_LIBRARY_NAME nor '
                        'SHARED_LIBRARY_NAME is set. At least one is required.',
                        context)
                if static_name and not shared_name and static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and SHARED_LIBRARY_NAME is unset. Please either '
                        'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'SHARED_LIBRARY_NAME.', context)
                if shared_name and not static_name and shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and STATIC_LIBRARY_NAME is unset. Please either '
                        'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'STATIC_LIBRARY_NAME.', context)
                if shared_name and static_name and shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        context)

            if shared_lib:
                lib = SharedLibrary(context, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(context, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))

            if lib_defines:
                if not libname:
                    raise SandboxValidationError(
                        'LIBRARY_DEFINES needs a LIBRARY_NAME to take effect',
                        context)
                lib.defines.update(lib_defines)

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, package_tests)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix of where to install the files in
        #     the tests directory.
        # install_subdir is the path within install_root where the files are
        #     installed.
        # package_tests indicates whether to package test files into the test
        #     package; suites that compile the test files should not install
        #     them into the test package.
#
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
            ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.', False),
            JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest', 'jetpack-package', True),
            JETPACK_ADDON=('jetpack-addon', 'testing/mochitest', 'jetpack-addon', False),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True),
        )

        for prefix, info in test_manifests.items():
            for path in context.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(context, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in context.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(
                        context, flavor, path):
                    yield obj

        jar_manifests = context.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError(
                'While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', context)

        for path in jar_manifests:
            yield JARManifest(context, mozpath.join(context.srcdir, path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError(
                    'A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', context)

        for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
            yield ContextWrapped(context, jar)

        for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS',
                                      {}).items():
            yield ContextWrapped(context, data)

        if passthru.variables:
            yield passthru

    def _create_substitution(self, cls, context, path):
        if os.path.isabs(path):
            path = path[1:]

        sub = cls(context)
        sub.input_path = mozpath.join(context.srcdir, '%s.in' % path)
        sub.output_path = mozpath.join(context.objdir, path)
        sub.relpath = path
        return sub

    def _process_test_manifest(self, context, info, manifest_path):
        flavor, install_root, install_subdir, package_tests = info

        manifest_path = mozpath.normpath(manifest_path)
        path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
        manifest_dir = mozpath.dirname(path)
        manifest_reldir = mozpath.dirname(
            mozpath.relpath(path, context.config.topsrcdir))
        install_prefix = mozpath.join(install_root, install_subdir)

        try:
            m = manifestparser.TestManifest(manifests=[path], strict=True)
            defaults = m.manifest_defaults[os.path.normpath(path)]
            if not m.tests and 'support-files' not in defaults:
                raise SandboxValidationError('Empty test manifest: %s'
                                             % path, context)

            obj = TestManifest(context, path, m, flavor=flavor,
                               install_prefix=install_prefix,
                               relpath=mozpath.join(manifest_reldir,
                                                    mozpath.basename(path)),
                               dupe_manifest='dupe-manifest' in defaults)

            filtered = m.tests

            # Jetpack add-on tests are expected to be generated during the
            # build process so they won't exist here.
if flavor != 'jetpack-addon':
                missing = [
                    t['name'] for t in filtered
                    if not os.path.exists(t['path'])
                ]
                if missing:
                    raise SandboxValidationError(
                        'Test manifest (%s) lists '
                        'tests that do not exist: %s'
                        % (path, ', '.join(missing)), context)

            out_dir = mozpath.join(install_prefix, manifest_reldir)
            if 'install-to-subdir' in defaults:
                # This is terrible, but what are you going to do?
                out_dir = mozpath.join(out_dir, defaults['install-to-subdir'])
                obj.manifest_obj_relpath = mozpath.join(
                    manifest_reldir,
                    defaults['install-to-subdir'],
                    mozpath.basename(path))

            # "head" and "tail" lists.
            # All manifests support support-files.
            #
            # Keep a set of already seen support file patterns, because
            # repeatedly processing the patterns from the default section
            # for every test is quite costly (see bug 922517).
            extras = (('head', set()),
                      ('tail', set()),
                      ('support-files', set()))

            def process_support_files(test):
                for thing, seen in extras:
                    value = test.get(thing, '')
                    if value in seen:
                        continue
                    seen.add(value)
                    for pattern in value.split():
                        # We only support globbing on support-files because
                        # the harness doesn't support * for head and tail.
                        if '*' in pattern and thing == 'support-files':
                            obj.pattern_installs.append(
                                (manifest_dir, pattern, out_dir))
                        # "absolute" paths identify files that are to be
                        # placed in the install_root directory (no globs)
                        elif pattern[0] == '/':
                            full = mozpath.normpath(
                                mozpath.join(manifest_dir,
                                             mozpath.basename(pattern)))
                            obj.installs[full] = (mozpath.join(
                                install_root, pattern[1:]), False)
                        else:
                            full = mozpath.normpath(
                                mozpath.join(manifest_dir, pattern))
                            dest_path = mozpath.join(out_dir, pattern)

                            # If the path resolves to a different directory
                            # tree, we take special behavior depending on the
                            # entry type.
                            if not full.startswith(manifest_dir):
                                # If it's a support file, we install the file
                                # into the current destination directory.
                                # This implementation makes installing things
                                # with custom prefixes impossible. If this is
                                # needed, we can add support for that via a
                                # special syntax later.
                                if thing == 'support-files':
                                    dest_path = mozpath.join(
                                        out_dir, os.path.basename(pattern))
                                # If it's not a support file, we ignore it.
                                # This preserves old behavior so things like
                                # head files don't get installed multiple
                                # times.
                                else:
                                    continue

                            obj.installs[full] = (mozpath.normpath(dest_path),
                                                  False)

            for test in filtered:
                obj.tests.append(test)

                # Some test files are compiled and should not be copied into
                # the test package. They function as identifiers rather than
                # files.
                if package_tests:
                    obj.installs[mozpath.normpath(test['path'])] = \
                        (mozpath.join(out_dir, test['relpath']), True)

                process_support_files(test)

            if not filtered:
                # If there are no tests, look for support-files under DEFAULT.
                process_support_files(defaults)

            # We also copy manifests into the output directory,
            # including manifests from [include:foo] directives.
            for mpath in m.manifests():
                mpath = mozpath.normpath(mpath)
                out_path = mozpath.join(out_dir, mozpath.basename(mpath))
                obj.installs[mpath] = (out_path, False)

            # Some manifests reference files that are auto generated as
            # part of the build or shouldn't be installed for some
            # reason. Here, we prune those files from the install set.
            # FUTURE we should be able to detect autogenerated files from
            # other build metadata. Once we do that, we can get rid of this.
            for f in defaults.get('generated-files', '').split():
                # We re-raise otherwise the stack trace isn't informative.
try: del obj.installs[mozpath.join(manifest_dir, f)] except KeyError: raise SandboxValidationError( 'Error processing test ' 'manifest %s: entry in generated-files not present ' 'elsewhere in manifest: %s' % (path, f), context) obj.external_installs.add(mozpath.join(out_dir, f)) yield obj except (AssertionError, Exception): raise SandboxValidationError( 'Error processing test ' 'manifest file %s: %s' % (path, '\n'.join(traceback.format_exception(*sys.exc_info()))), context) def _process_reftest_manifest(self, context, flavor, manifest_path): manifest_path = mozpath.normpath(manifest_path) manifest_full_path = mozpath.normpath( mozpath.join(context.srcdir, manifest_path)) manifest_reldir = mozpath.dirname( mozpath.relpath(manifest_full_path, context.config.topsrcdir)) manifest = reftest.ReftestManifest() manifest.load(manifest_full_path) # reftest manifests don't come from manifest parser. But they are # similar enough that we can use the same emitted objects. Note # that we don't perform any installs for reftests. obj = TestManifest(context, manifest_full_path, manifest, flavor=flavor, install_prefix='%s/' % flavor, relpath=mozpath.join( manifest_reldir, mozpath.basename(manifest_path))) for test in sorted(manifest.files): obj.tests.append({ 'path': test, 'here': mozpath.dirname(test), 'manifest': manifest_full_path, 'name': mozpath.basename(test), 'head': '', 'tail': '', 'support-files': '', 'subsuite': '', }) yield obj def _emit_directory_traversal_from_context(self, context): o = DirectoryTraversal(context) o.dirs = context.get('DIRS', []) o.test_dirs = context.get('TEST_DIRS', []) o.affected_tiers = context.get_affected_tiers() # Some paths have a subconfigure, yet also have a moz.build. Those # shouldn't end up in self._external_paths. self._external_paths -= {o.relobjdir} yield o
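# Illustrative sketch (hypothetical moz.build fragment, not from the tree):
# the linkage rules validated above, in the terms a moz.build author sees.
# FINAL_LIBRARY must name an existing LIBRARY_NAME elsewhere in the tree and
# implies a static library linked into it; a 'static:' prefix in USE_LIBS
# forces the static variant of the named library.
#
#     LIBRARY_NAME = 'widget_gtk'
#     FINAL_LIBRARY = 'xul'           # must match a LIBRARY_NAME somewhere
#     USE_LIBS += ['static:example']  # error unless a static 'example' exists
#
# Violations surface as SandboxValidationError during emit, e.g.
# 'FINAL_LIBRARY ("xul") does not match any LIBRARY_NAME'.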
class TupBackend(CommonBackend): """Backend that generates Tupfiles for the tup build system. """ def _init(self): CommonBackend._init(self) self._backend_files = {} self._cmd = MozbuildObject.from_environment() self._manifest_entries = OrderedDefaultDict(set) # These are a hack to approximate things that are needed for the # compile phase. self._compile_env_files = ( '*.api', '*.c', '*.cfg', '*.cpp', '*.h', '*.inc', '*.msg', '*.py', '*.rs', ) # These are 'group' dependencies - All rules that list these as an output # will be built before any rules that list this as an input. self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>' self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>' self._rust_libs = '$(MOZ_OBJ_ROOT)/<rust-libs>' # The preprocessor including source-repo.h and buildid.h creates # dependencies that aren't specified by moz.build and cause errors # in Tup. Express these as a group dependency. self._early_generated_files = '$(MOZ_OBJ_ROOT)/<early-generated-files>' self._shlibs = '$(MOZ_OBJ_ROOT)/<shlibs>' self._gtests = '$(MOZ_OBJ_ROOT)/<gtest>' self._default_group = '$(MOZ_OBJ_ROOT)/<default>' # The two rust libraries in the tree share many prerequisites, so we need # to prune common dependencies and therefore build all rust from the same # Tupfile. self._rust_outputs = set() self._rust_backend_file = self._get_backend_file( 'toolkit/library/rust') self._built_in_addons = set() self._built_in_addons_file = 'dist/bin/browser/chrome/browser/content/browser/built_in_addons.json' def _get_mozconfig_env(self, config): env = {} loader = MozconfigLoader(config.topsrcdir) mozconfig = loader.read_mozconfig(config.substs['MOZCONFIG']) make_extra = mozconfig['make_extra'] or [] env = {} for line in make_extra: if line.startswith('export '): line = line[len('export '):] key, value = line.split('=') env[key] = value return env def build(self, config, output, jobs, verbose, what=None): if not what: what = ['%s/<default>' % config.topobjdir] args = [self.environment.substs['TUP'], 'upd'] + what if self.environment.substs.get('MOZ_AUTOMATION'): args += ['--quiet'] if verbose: args += ['--verbose'] if jobs > 0: args += ['-j%d' % jobs] else: args += ['-j%d' % multiprocessing.cpu_count()] status = config.run_process(args=args, line_handler=output.on_line, ensure_exit_code=False, append_env=self._get_mozconfig_env(config)) # upload Tup db if (not status and self.environment.substs.get('MOZ_AUTOMATION') and self.environment.substs.get('UPLOAD_TUP_DB')): src = mozpath.join(self.environment.topsrcdir, '.tup') dst = mozpath.join(os.environ['UPLOAD_PATH'], 'tup_db') shutil.make_archive(dst, 'zip', src) return status def _get_backend_file(self, relobjdir): objdir = mozpath.normpath( mozpath.join(self.environment.topobjdir, relobjdir)) if objdir not in self._backend_files: self._backend_files[objdir] = \ BackendTupfile(objdir, self.environment, self.environment.topsrcdir, self.environment.topobjdir, self.dry_run, self._default_group) return self._backend_files[objdir] def _get_backend_file_for(self, obj): return self._get_backend_file(obj.relobjdir) def _py_action(self, action): cmd = [ '$(PYTHON)', '-m', 'mozbuild.action.%s' % action, ] return cmd def _lib_paths(self, objdir, libs): return [ mozpath.relpath(mozpath.join(l.objdir, l.import_name), objdir) for l in libs ] def _gen_shared_library(self, backend_file): shlib = backend_file.shared_lib output_group = self._shlibs if 'toolkit/library/gtest' in backend_file.objdir: output_group = self._gtests if shlib.cxx_link: mkshlib = 
([backend_file.environment.substs['CXX']] + backend_file.local_flags['CXX_LDFLAGS']) else: mkshlib = ([backend_file.environment.substs['CC']] + backend_file.local_flags['C_LDFLAGS']) mkshlib += (backend_file.environment.substs['DSO_PIC_CFLAGS'] + [backend_file.environment.substs['DSO_LDOPTS']] + ['-Wl,-h,%s' % shlib.soname] + ['-o', shlib.lib_name]) objs, _, _, shared_libs, os_libs, static_libs = self._expand_libs( shlib) static_libs = self._lib_paths(backend_file.objdir, static_libs) shared_libs = self._lib_paths(backend_file.objdir, shared_libs) list_file_name = '%s.list' % shlib.name.replace('.', '_') list_file = self._make_list_file(backend_file.objdir, objs, list_file_name) rust_linked = self._lib_paths( backend_file.objdir, (l for l in backend_file.shared_lib.linked_libraries if isinstance(l, RustLibrary))) inputs = objs + static_libs + shared_libs extra_inputs = [] if rust_linked: extra_inputs = [self._rust_libs] static_libs += rust_linked symbols_file = [] if shlib.symbols_file: inputs.append(shlib.symbols_file) # TODO: Assumes GNU LD symbols_file = ['-Wl,--version-script,%s' % shlib.symbols_file] cmd = (mkshlib + [list_file] + backend_file.local_flags['LDFLAGS'] + static_libs + shared_libs + symbols_file + [backend_file.environment.substs['OS_LIBS']] + os_libs) backend_file.rule(cmd=cmd, inputs=inputs, extra_inputs=extra_inputs, outputs=[shlib.lib_name], output_group=output_group, display='LINK %o') backend_file.symlink_rule(mozpath.join(backend_file.objdir, shlib.lib_name), output=mozpath.join( self.environment.topobjdir, shlib.install_target, shlib.lib_name), output_group=output_group) def _gen_programs(self, backend_file): for p in backend_file.programs: self._gen_program(backend_file, p) def _gen_program(self, backend_file, prog): cc_or_cxx = 'CXX' if prog.cxx_link else 'CC' objs, _, _, shared_libs, os_libs, static_libs = self._expand_libs(prog) static_libs = self._lib_paths(backend_file.objdir, static_libs) shared_libs = self._lib_paths(backend_file.objdir, shared_libs) # Linking some programs will access libraries installed to dist/bin, # so depend on the installed libraries here. This can be made more # accurate once we start building libraries in their final locations. 
inputs = objs + static_libs + shared_libs + [self._shlibs]

        list_file_name = '%s.list' % prog.name.replace('.', '_')
        list_file = self._make_list_file(backend_file.objdir, objs,
                                         list_file_name)

        if isinstance(prog, SimpleProgram):
            outputs = [prog.name]
        else:
            outputs = [
                mozpath.relpath(prog.output_path.full_path,
                                backend_file.objdir)
            ]

        cmd = ([backend_file.environment.substs[cc_or_cxx], '-o', '%o'] +
               backend_file.local_flags['CXX_LDFLAGS'] +
               [list_file] +
               backend_file.local_flags['LDFLAGS'] +
               static_libs +
               [backend_file.environment.substs['MOZ_PROGRAM_LDFLAGS']] +
               shared_libs +
               [backend_file.environment.substs['OS_LIBS']] +
               os_libs)
        backend_file.rule(cmd=cmd, inputs=inputs, outputs=outputs,
                          display='LINK %o')

    def _gen_host_library(self, backend_file):
        objs = backend_file.host_library.objs
        inputs = objs
        outputs = [backend_file.host_library.name]
        cmd = ([backend_file.environment.substs['HOST_AR']] +
               [backend_file.environment.substs['HOST_AR_FLAGS'].replace(
                   '$@', '%o')] +
               objs)
        backend_file.rule(cmd=cmd, inputs=inputs, outputs=outputs,
                          display='AR %o')

    def _gen_host_programs(self, backend_file):
        for p in backend_file.host_programs:
            self._gen_host_program(backend_file, p)

    def _gen_host_program(self, backend_file, prog):
        _, _, _, _, extra_libs, _ = self._expand_libs(prog)
        objs = prog.objs

        if isinstance(prog, HostSimpleProgram):
            outputs = [prog.name]
        else:
            outputs = [
                mozpath.relpath(prog.output_path.full_path,
                                backend_file.objdir)
            ]
        host_libs = []
        for lib in prog.linked_libraries:
            if isinstance(lib, HostLibrary):
                host_libs.append(lib)
        host_libs = self._lib_paths(backend_file.objdir, host_libs)

        inputs = objs + host_libs
        use_cxx = any(f.endswith(('.cc', '.cpp')) for f in prog.source_files())
        cc_or_cxx = 'HOST_CXX' if use_cxx else 'HOST_CC'
        cmd = ([backend_file.environment.substs[cc_or_cxx], '-o', '%o'] +
               backend_file.local_flags['HOST_CXX_LDFLAGS'] +
               backend_file.local_flags['HOST_LDFLAGS'] +
               objs +
               host_libs +
               extra_libs)
        backend_file.rule(cmd=cmd, inputs=inputs, outputs=outputs,
                          display='LINK %o')

    def _gen_static_library(self, backend_file):
        ar = [
            backend_file.environment.substs['AR'],
            backend_file.environment.substs['AR_FLAGS'].replace('$@', '%o')
        ]
        objs, _, _, shared_libs, _, static_libs = self._expand_libs(
            backend_file.static_lib)
        static_libs = self._lib_paths(backend_file.objdir, static_libs)
        shared_libs = self._lib_paths(backend_file.objdir, shared_libs)

        inputs = objs + static_libs

        cmd = (ar + inputs)
        backend_file.rule(cmd=cmd, inputs=inputs,
                          outputs=[backend_file.static_lib.name],
                          display='AR %o')

    def consume_object(self, obj):
        """Write out build files necessary to build with tup."""

        if not isinstance(obj, ContextDerived):
            return False

        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            skip_files = []

            if self.environment.is_artifact_build:
                skip_files = self._compile_env_files

            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    return False

            if backend_file.requires_delay(obj.inputs):
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry) and
              obj.install_target.startswith(('dist/bin', 'dist/xpi-stage'))):
            # The quitter extension specifies its chrome.manifest as a
            # FINAL_TARGET_FILE, which conflicts with the manifest generation
            # we do here, so skip it for now.
if obj.install_target != 'dist/xpi-stage/quitter': top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath( obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, Defines): self._process_defines(backend_file, obj) elif isinstance(obj, HostDefines): self._process_defines(backend_file, obj, host=True) elif isinstance(obj, FinalTargetFiles): self._process_final_target_files(obj) elif isinstance(obj, FinalTargetPreprocessedFiles): self._process_final_target_pp_files(obj, backend_file) elif isinstance(obj, JARManifest): self._consume_jar_manifest(obj) elif isinstance(obj, PerSourceFlag): backend_file.per_source_flags[obj.file_name].extend(obj.flags) elif isinstance(obj, ComputedFlags): self._process_computed_flags(obj, backend_file) elif isinstance(obj, (Sources, GeneratedSources)): backend_file.sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, HostSources): backend_file.host_sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, VariablePassthru): backend_file.variables = obj.variables elif isinstance(obj, RustLibrary): self._gen_rust_rules(obj, backend_file) elif isinstance(obj, StaticLibrary): backend_file.static_lib = obj elif isinstance(obj, SharedLibrary): backend_file.shared_lib = obj elif isinstance(obj, (HostProgram, HostSimpleProgram)): backend_file.host_programs.append(obj) elif isinstance(obj, HostLibrary): backend_file.host_library = obj elif isinstance(obj, (Program, SimpleProgram)): backend_file.programs.append(obj) elif isinstance(obj, DirectoryTraversal): pass return True def consume_finished(self): CommonBackend.consume_finished(self) # The approach here is similar to fastermake.py, but we # simply write out the resulting files here. for target, entries in self._manifest_entries.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, target)) as fh: fh.write(''.join('%s\n' % e for e in sorted(entries))) if self._built_in_addons: with self._write_file( mozpath.join(self.environment.topobjdir, self._built_in_addons_file)) as fh: json.dump({'system': sorted(list(self._built_in_addons))}, fh) for objdir, backend_file in sorted(self._backend_files.items()): backend_file.gen_sources_rules([self._installed_files]) for var, gen_method in ((backend_file.shared_lib, self._gen_shared_library), (backend_file.static_lib and backend_file.static_lib.no_expand_lib, self._gen_static_library), (backend_file.programs, self._gen_programs), (backend_file.host_programs, self._gen_host_programs), (backend_file.host_library, self._gen_host_library)): if var: backend_file.export_shell() gen_method(backend_file) for obj in backend_file.delayed_generated_files: self._process_generated_file(backend_file, obj) for path, output, output_group in backend_file.delayed_installed_files: backend_file.symlink_rule(path, output=output, output_group=output_group) with self._write_file(fh=backend_file): pass with self._write_file( mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh: acdefines_flags = ' '.join([ '-D%s=%s' % (name, shell_quote(value)) for (name, value) in sorted(self.environment.acdefines.iteritems()) ]) # TODO: AB_CD only exists in Makefiles at the moment. 
acdefines_flags += ' -DAB_CD=en-US' # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in # toolkit/content/buildconfig.html acdefines_flags += ' -DBUILD_FASTER=1' fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n') fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n') fh.write('ACDEFINES = %s\n' % acdefines_flags) fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (os.path.relpath( self.environment.topsrcdir, self.environment.topobjdir))) fh.write('PYTHON = PYTHONDONTWRITEBYTECODE=1 %s\n' % self.environment.substs['PYTHON']) fh.write( 'PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n') fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n') fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n') fh.write( 'IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n' ) # Run 'tup init' if necessary. if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init']) def _get_cargo_flags(self, obj): cargo_flags = ['--build-plan', '-Z', 'unstable-options'] if not self.environment.substs.get('MOZ_DEBUG_RUST'): cargo_flags += ['--release'] cargo_flags += [ '--frozen', '--manifest-path', mozpath.join(obj.srcdir, 'Cargo.toml'), '--lib', '--target=%s' % self.environment.substs['RUST_TARGET'], ] if obj.features: cargo_flags += ['--features', ' '.join(obj.features)] return cargo_flags def _get_cargo_env(self, lib, backend_file): env = { 'CARGO_TARGET_DIR': mozpath.normpath(mozpath.join(lib.objdir, lib.target_dir)), 'RUSTC': self.environment.substs['RUSTC'], 'MOZ_SRC': self.environment.topsrcdir, 'MOZ_DIST': self.environment.substs['DIST'], 'LIBCLANG_PATH': self.environment.substs['MOZ_LIBCLANG_PATH'], 'CLANG_PATH': self.environment.substs['MOZ_CLANG_PATH'], 'PKG_CONFIG_ALLOW_CROSS': '1', 'RUST_BACKTRACE': 'full', 'MOZ_TOPOBJDIR': self.environment.topobjdir, 'PYTHON': self.environment.substs['PYTHON'], 'PYTHONDONTWRITEBYTECODE': '1', } cargo_incremental = self.environment.substs.get('CARGO_INCREMENTAL') if cargo_incremental is not None: # TODO (bug 1468527): CARGO_INCREMENTAL produces outputs that Tup # doesn't know about, disable it unconditionally for now. 
pass # env['CARGO_INCREMENTAL'] = cargo_incremental rust_simd = self.environment.substs.get('MOZ_RUST_SIMD') if rust_simd is not None: env['RUSTC_BOOTSTRAP'] = '1' linker_env_var = ('CARGO_TARGET_%s_LINKER' % self.environment.substs['RUST_TARGET_ENV_NAME']) env.update({ 'MOZ_CARGO_WRAP_LDFLAGS': ' '.join(backend_file.local_flags['LDFLAGS']), 'MOZ_CARGO_WRAP_LD': backend_file.environment.substs['CC'], linker_env_var: mozpath.join(self.environment.topsrcdir, 'build', 'cargo-linker'), 'RUSTFLAGS': '%s %s' % (' '.join(self.environment.substs['MOZ_RUST_DEFAULT_FLAGS']), ' '.join(self.environment.substs['RUSTFLAGS'])), }) return env def _gen_cargo_rules(self, backend_file, build_plan, cargo_env): invocations = build_plan['invocations'] processed = set() def get_libloading_outdir(): for invocation in invocations: if (invocation['package_name'] == 'libloading' and invocation['outputs'][0].endswith('.rlib')): return invocation['env']['OUT_DIR'] def display_name(invocation): output_str = '' if invocation['outputs']: output_str = ' -> %s' % ' '.join( [os.path.basename(f) for f in invocation['outputs']]) return '{name} v{version} {kind}{output}'.format( name=invocation['package_name'], version=invocation['package_version'], kind=invocation['kind'], output=output_str) def cargo_quote(s): return shell_quote(s.replace('\n', '\\n')) def _process(key, invocation): if key in processed: return processed.add(key) inputs = set() shortname = invocation['package_name'] for dep in invocation['deps']: # We'd expect to just handle dependencies transitively (so use # invocations[dep]['outputs'] here, but because the weird host dependencies # sometimes get used in the final library and not intermediate # libraries, tup doesn't work well with them. So build up the full set # of intermediate dependencies with 'full-deps' depmod = invocations[dep] _process(dep, depmod) inputs.update(depmod['full-deps']) command = [ 'cd %s &&' % invocation['cwd'], 'env', ] envvars = invocation.get('env') for k, v in itertools.chain(cargo_env.iteritems(), envvars.iteritems()): command.append("%s=%s" % (k, cargo_quote(v))) command.append(invocation['program']) command.extend( cargo_quote(a.replace('dep-info,', '')) for a in invocation['args']) outputs = invocation['outputs'] if os.path.basename(invocation['program']) == 'build-script-build': for output in cargo_extra_outputs.get(shortname, []): outputs.append( os.path.join(invocation['env']['OUT_DIR'], output)) if (invocation['target_kind'][0] == 'custom-build' and os.path.basename(invocation['program']) == 'rustc'): flags = cargo_extra_flags.get(shortname, []) for flag in flags: command.append( flag % {'libloading_outdir': get_libloading_outdir()}) if 'rustc' in invocation['program']: header = 'RUSTC' else: inputs.add(invocation['program']) header = 'RUN' invocation['full-deps'] = set(inputs) invocation['full-deps'].update(invocation['outputs']) output_key = tuple(outputs) if output_key not in self._rust_outputs: self._rust_outputs.add(output_key) self._rust_backend_file.rule( command, inputs=sorted(inputs), outputs=outputs, output_group=self._rust_libs, extra_inputs=[self._installed_files], display='%s %s' % (header, display_name(invocation)), ) for dst, link in invocation['links'].iteritems(): self._rust_outputs.add(output_key) self._rust_backend_file.symlink_rule( link, dst, self._rust_libs) for val in enumerate(invocations): _process(*val) def _gen_rust_rules(self, obj, backend_file): cargo_flags = self._get_cargo_flags(obj) cargo_env = self._get_cargo_env(obj, backend_file) 
output_lines = [] def accumulate_output(line): output_lines.append(line) cargo_status = self._cmd.run_process( [self.environment.substs['CARGO'], 'build'] + cargo_flags, line_handler=accumulate_output, ensure_exit_code=False, explicit_env=cargo_env) if cargo_status: raise Exception("cargo --build-plan failed with output:\n%s" % '\n'.join(output_lines)) cargo_plan = json.loads(''.join(output_lines)) self._gen_cargo_rules(backend_file, cargo_plan, cargo_env) self.backend_input_files |= set(cargo_plan['inputs']) def _process_generated_file(self, backend_file, obj): if obj.script and obj.method: backend_file.export_shell() cmd = self._py_action('file_generate') if obj.localized: cmd.append('--locale=en-US') cmd.extend([ obj.script, obj.method, obj.outputs[0], '%s.pp' % obj.outputs[0], # deps file required 'unused', # deps target is required ]) full_inputs = [f.full_path for f in obj.inputs] cmd.extend(full_inputs) cmd.extend(shell_quote(f) for f in obj.flags) outputs = [] outputs.extend(obj.outputs) outputs.append('%s.pp' % obj.outputs[0]) extra_exports = { 'buildid.h': ['MOZ_BUILD_DATE'], } for f in obj.outputs: exports = extra_exports.get(f) if exports: backend_file.export(exports) if any( f.endswith(('automation.py', 'source-repo.h', 'buildid.h')) for f in obj.outputs): output_group = self._early_generated_files else: output_group = self._installed_files if obj.required_for_compile else None full_inputs += [self._early_generated_files] extra_inputs = [] if any(f in obj.outputs for f in ('dependentlibs.list', 'dependentlibs.list.gtest')): extra_inputs += [self._shlibs] if len(outputs) > 3: display_outputs = ', '.join(outputs[0:3]) + ', ...' else: display_outputs = ', '.join(outputs) display = 'python {script}:{method} -> [{display_outputs}]'.format( script=obj.script, method=obj.method, display_outputs=display_outputs) backend_file.rule( display=display, cmd=cmd, inputs=full_inputs, extra_inputs=extra_inputs, outputs=outputs, output_group=output_group, check_unchanged=True, ) def _process_defines(self, backend_file, obj, host=False): defines = list(obj.get_defines()) if defines: if host: backend_file.host_defines = defines else: backend_file.defines = defines def _add_features(self, target, path): path_parts = mozpath.split(path) if all([ target == 'dist/bin/browser', path_parts[0] == 'features', len(path_parts) > 1 ]): self._built_in_addons.add(path_parts[1]) def _process_final_target_files(self, obj): target = obj.install_target if not isinstance(obj, ObjdirFiles): path = mozpath.basedir(target, ( 'dist/bin', 'dist/xpi-stage', '_tests', 'dist/include', 'dist/sdk', )) if not path: raise Exception("Cannot install to " + target) for path, files in obj.files.walk(): self._add_features(target, path) for f in files: output_group = None if any( mozpath.match(mozpath.basename(f), p) for p in self._compile_env_files): output_group = self._installed_files if not isinstance(f, ObjDirPath): backend_file = self._get_backend_file( mozpath.join(target, path)) if '*' in f: if f.startswith('/') or isinstance(f, AbsolutePath): basepath, wild = os.path.split(f.full_path) if '*' in basepath: raise Exception( "Wildcards are only supported in the filename part of " "srcdir-relative or absolute paths.") # TODO: This is only needed for Windows, so we can # skip this for now.
pass else: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) self.backend_input_files.add(prefix) output_dir = '' # If we have a RenamedSourcePath here, the common backend # has generated this object from a jar manifest, and we # can rely on 'path' to be our destination path relative # to any wildcard match. Otherwise, the output file may # contribute to our destination directory. if not isinstance(f, RenamedSourcePath): output_dir = ''.join( _prefix(mozpath.dirname(f))) finder = FileFinder(prefix) for p, _ in finder.find(f.full_path[len(prefix):]): install_dir = prefix[len(obj.srcdir) + 1:] output = p if f.target_basename and '*' not in f.target_basename: output = mozpath.join( f.target_basename, output) backend_file.symlink_rule( mozpath.join(prefix, p), output=mozpath.join(output_dir, output), output_group=output_group) else: backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group) else: if (self.environment.is_artifact_build and any( mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)): # If we have an artifact build we never would have generated this file, # so do not attempt to install it. continue output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path, f.target_basename) gen_backend_file = self._get_backend_file( f.context.relobjdir) if gen_backend_file.requires_delay([f]): gen_backend_file.delayed_installed_files.append( (f.full_path, output, output_group)) else: gen_backend_file.symlink_rule( f.full_path, output=output, output_group=output_group) def _process_final_target_pp_files(self, obj, backend_file): for i, (path, files) in enumerate(obj.files.walk()): self._add_features(obj.install_target, path) for f in files: self._preprocess(backend_file, f.full_path, destdir=mozpath.join( self.environment.topobjdir, obj.install_target, path), target=f.target_basename) def _process_computed_flags(self, obj, backend_file): for var, flags in obj.get_flags(): backend_file.local_flags[var] = flags def _process_unified_sources(self, obj): backend_file = self._get_backend_file_for(obj) files = [f[0] for f in obj.unified_source_mapping] backend_file.sources[obj.canonical_suffix].extend(files) def _handle_idl_manager(self, manager): if self.environment.is_artifact_build: return backend_file = self._get_backend_file('xpcom/xpidl') backend_file.export_shell() all_idl_directories = set() all_idl_directories.update( *map(lambda x: x[1], manager.modules.itervalues())) all_xpts = [] for module, (idls, _) in sorted(manager.modules.iteritems()): cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '-I$(IDL_PARSER_DIR)', '-I$(IDL_PARSER_CACHE_DIR)', '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py', '--cache-dir', '$(IDL_PARSER_CACHE_DIR)', '--bindings-conf', '$(topsrcdir)/dom/bindings/Bindings.conf', ] for d in all_idl_directories: cmd.extend(['-I', d]) cmd.extend([ '$(DIST)/include', '$(DIST)/xpcrs', '.', module, ]) cmd.extend(sorted(idls)) all_xpts.append('$(MOZ_OBJ_ROOT)/%s/%s.xpt' % (backend_file.relobjdir, module)) outputs = ['%s.xpt' % module] stems = sorted( mozpath.splitext(mozpath.basename(idl))[0] for idl in idls) outputs.extend( ['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in stems]) outputs.extend( ['$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in stems]) outputs.extend( ['$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in stems]) backend_file.rule( inputs=[ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py', '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py', 
self._installed_idls, ], display='XPIDL %s' % module, cmd=cmd, outputs=outputs, output_group=self._installed_files, check_unchanged=True, ) cpp_backend_file = self._get_backend_file('xpcom/reflect/xptinfo') cpp_backend_file.export_shell() cpp_backend_file.rule( inputs=all_xpts, display='XPIDL xptcodegen.py %o', cmd=[ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '$(topsrcdir)/xpcom/reflect/xptinfo/xptcodegen.py', '%o', '%f', ], outputs=['xptdata.cpp'], check_unchanged=True, ) def _preprocess(self, backend_file, input_file, destdir=None, target=None): if target is None: target = mozpath.basename(input_file) # .css files use '%' as the preprocessor marker, which must be escaped as # '%%' in the Tupfile. marker = '%%' if target.endswith('.css') else '#' cmd = self._py_action('preprocessor') cmd.extend([shell_quote(d) for d in backend_file.defines]) cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker]) base_input = mozpath.basename(target) if base_input.endswith('.in'): base_input = mozpath.splitext(base_input)[0] output = mozpath.join(destdir, base_input) if destdir else base_input backend_file.rule( inputs=[input_file], extra_inputs=[self._early_generated_files], display='Preprocess %o', cmd=cmd, outputs=[output], check_unchanged=True, ) def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources, sorted_nonstatic_ipdl_sources, sorted_static_ipdl_sources, unified_ipdl_cppsrcs_mapping): # Preferably we wouldn't have to import ipdl, but we need to parse the # ast in order to determine the namespaces since they are used in the # header output paths. sys.path.append(mozpath.join(self.environment.topsrcdir, 'ipc', 'ipdl')) import ipdl backend_file = self._get_backend_file('ipc/ipdl') outheaderdir = '_ipdlheaders' srcdir = mozpath.join(self.environment.topsrcdir, 'ipc/ipdl') cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '%s/ipdl.py' % srcdir, '--sync-msg-list=%s/sync-messages.ini' % srcdir, '--msg-metadata=%s/message-metadata.ini' % srcdir, '--outheaders-dir=%s' % outheaderdir, '--outcpp-dir=.', ] ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources)) cmd.extend(['-I%s' % d for d in ipdldirs]) cmd.extend(sorted_ipdl_sources) outputs = [ 'IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py' ] for filename in sorted_ipdl_sources: filepath, ext = os.path.splitext(filename) dirname, basename = os.path.split(filepath) dirname = mozpath.relpath(dirname, self.environment.topsrcdir) extensions = [''] if ext == '.ipdl': extensions.extend(['Child', 'Parent']) with open(filename) as f: ast = ipdl.parse(f.read(), filename, includedirs=ipdldirs) self.backend_input_files.add(filename) headerdir = os.path.join(outheaderdir, *([ns.name for ns in ast.namespaces])) for extension in extensions: outputs.append("%s%s.cpp" % (basename, extension)) outputs.append( mozpath.join(headerdir, '%s%s.h' % (basename, extension))) backend_file.rule( display='IPDL code generation', cmd=cmd, outputs=outputs, output_group=self._installed_files, check_unchanged=True, ) backend_file.sources['.cpp'].extend( u[0] for u in unified_ipdl_cppsrcs_mapping) def _handle_webidl_build(self, bindings_dir, unified_source_mapping, webidls, expected_build_output_files, global_define_files): backend_file = self._get_backend_file('dom/bindings') backend_file.export_shell() for source in sorted(webidls.all_preprocessed_sources()): self._preprocess(backend_file, source) cmd = self._py_action('webidl') cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings')) #
The WebIDLCodegenManager knows all of the .cpp and .h files that will # be created (expected_build_output_files), but there are a few # additional files that are also created by the webidl py_action. outputs = [ '_cache/webidlyacc.py', 'codegen.json', 'codegen.pp', 'parser.out', ] outputs.extend(expected_build_output_files) backend_file.rule( display='WebIDL code generation', cmd=cmd, inputs=webidls.all_non_static_basenames(), outputs=outputs, output_group=self._installed_files, check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_source_mapping) backend_file.sources['.cpp'].extend(sorted(global_define_files)) test_backend_file = self._get_backend_file('dom/bindings/test') test_backend_file.sources['.cpp'].extend( sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
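# --- Illustrative sketch (not part of the backend above): the Tupfile ':'
# rule syntax that BackendTupfile.rule() ultimately emits. The helper below
# is hypothetical; it only shows how group dependencies such as
# '$(MOZ_OBJ_ROOT)/<installed-files>' appear in a rule. The real formatting
# logic lives in BackendTupfile, which is not shown in this file.
def format_tup_rule(cmd, inputs=(), outputs=(), output_group=None,
                    display=None):
    # Tup's rule syntax is ': [inputs] |> [command] |> [outputs] [group]'.
    # A '^ text^' prefix on the command replaces the default display text,
    # and a trailing 'path/<name>' output adds the rule to that group.
    command = ' '.join(cmd)
    if display:
        command = '^ %s^ %s' % (display, command)
    rule = ': %s |> %s |> %s' % (' '.join(inputs), command,
                                 ' '.join(outputs))
    if output_group:
        rule += ' ' + output_group
    return rule

# Example: a generated header whose rule joins the <installed-files> group,
# so compile rules that list the group as an input run after it:
# format_tup_rule(['$(PYTHON)', 'gen.py'], inputs=['gen.py'],
#                 outputs=['foo.h'],
#                 output_group='$(MOZ_OBJ_ROOT)/<installed-files>')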
class TestMetadata(object): """Holds information about tests. This class provides an API to query tests active in the build configuration. """ def __init__(self, all_tests, srcdir, test_defaults=None): self._tests_by_path = OrderedDefaultDict(list) self._tests_by_flavor = defaultdict(set) self._test_dirs = set() self._objdir = os.path.abspath(os.path.join(all_tests, os.pardir)) self._wpt_loaded = False self._srcdir = srcdir with open(all_tests, 'rb') as fh: test_data = pickle.load(fh) defaults = None if test_defaults: with open(test_defaults, 'rb') as fh: defaults = pickle.load(fh) for path, tests in test_data.items(): for metadata in tests: if defaults: defaults_manifests = [metadata['manifest']] ancestor_manifest = metadata.get('ancestor-manifest') if ancestor_manifest: # The (ancestor manifest, included manifest) tuple # contains the defaults of the included manifest, so # use it instead of [metadata['manifest']]. defaults_manifests[0] = (ancestor_manifest, metadata['manifest']) defaults_manifests.append(ancestor_manifest) for manifest in defaults_manifests: manifest_defaults = defaults.get(manifest) if manifest_defaults: metadata = manifestparser.combine_fields(manifest_defaults, metadata) self._tests_by_path[path].append(metadata) self._test_dirs.add(os.path.dirname(path)) flavor = metadata.get('flavor') self._tests_by_flavor[flavor].add(path) def tests_with_flavor(self, flavor): """Obtain all tests having the specified flavor. This is a generator of dicts describing each test. """ for path in sorted(self._tests_by_flavor.get(flavor, [])): yield self._tests_by_path[path] def resolve_tests(self, paths=None, flavor=None, subsuite=None, under_path=None, tags=None): """Resolve tests from an identifier. This is a generator of dicts describing each test. ``paths`` can be an iterable of values to use to identify tests to run. If an entry is a known test file, tests associated with that file are returned (there may be multiple configurations for a single file). If an entry is a directory, or a prefix of a directory containing tests, all tests in that directory are returned. If the string appears in a known test file, that test file is considered. If the path contains a wildcard pattern, tests matching that pattern are returned. If ``under_path`` is a string, it will be used to filter out tests that aren't in the specified path prefix relative to topsrcdir or the test's installed dir. If ``flavor`` is a string, it will be used to filter returned tests to only be the flavor specified. A flavor is something like ``xpcshell``. If ``subsuite`` is a string, it will be used to filter returned tests to only be in the subsuite specified. If ``tags`` are specified, they will be used to filter returned tests to only those with a matching tag. """ if tags: tags = set(tags) def fltr(tests): for test in tests: if flavor: if flavor == 'devtools' and test.get('flavor') != 'browser-chrome': continue if flavor != 'devtools' and test.get('flavor') != flavor: continue if subsuite and test.get('subsuite') != subsuite: continue if tags and not (tags & set(test.get('tags', '').split())): continue if under_path and not test['file_relpath'].startswith(under_path): continue # Make a copy so modifications don't change the source. 
yield dict(test) paths = paths or [] paths = [mozpath.normpath(p) for p in paths] if not paths: paths = [None] candidate_paths = set() if flavor in (None, 'web-platform-tests') and any(self.is_wpt_path(p) for p in paths): self.add_wpt_manifest_data() for path in sorted(paths): if path is None: candidate_paths |= set(self._tests_by_path.keys()) continue if '*' in path: candidate_paths |= {p for p in self._tests_by_path if mozpath.match(p, path)} continue # If the path is a directory, or the path is a prefix of a directory # containing tests, pull in all tests in that directory. if (path in self._test_dirs or any(p.startswith(path) for p in self._tests_by_path)): candidate_paths |= {p for p in self._tests_by_path if p.startswith(path)} continue # If it's a test file, add just that file. candidate_paths |= {p for p in self._tests_by_path if path in p} for p in sorted(candidate_paths): tests = self._tests_by_path[p] for test in fltr(tests): yield test def is_wpt_path(self, path): if path is None: return True if mozpath.match(path, "testing/web-platform/tests/**"): return True if mozpath.match(path, "testing/web-platform/mozilla/tests/**"): return True return False def add_wpt_manifest_data(self): if self._wpt_loaded: return wpt_path = os.path.join(self._srcdir, "testing", "web-platform") sys.path = [wpt_path] + sys.path import manifestupdate # Set up a logger that will drop all the output import logging logger = logging.getLogger("manifestupdate") logger.propagate = False manifests = manifestupdate.run(self._srcdir, self._objdir, rebuild=False, download=True, config_path=None, rewrite_config=True, update=True, logger=logger) if not manifests: print("Loading wpt manifest failed") return for manifest, data in manifests.iteritems(): tests_root = data["tests_path"] for test_type, path, tests in manifest: full_path = os.path.join(tests_root, path) src_path = os.path.relpath(full_path, self._srcdir) if test_type not in ["testharness", "reftest", "wdspec"]: continue for test in tests: self._tests_by_path[src_path].append({ "path": full_path, "flavor": "web-platform-tests", "here": os.path.dirname(path), "manifest": data["manifest_path"], "name": test.id, "file_relpath": path, "head": "", "support-files": "", "subsuite": test_type, "dir_relpath": os.path.dirname(src_path), "srcdir_relpath": src_path, }) self._wpt_loaded = True
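# Minimal usage sketch for TestMetadata above. The pickle filenames and the
# test paths here are illustrative assumptions; the build writes the
# all-tests / test-defaults pickles into the objdir.
#
#   metadata = TestMetadata('/obj/all-tests.pkl', '/src',
#                           test_defaults='/obj/test-defaults.pkl')
#   # All xpcshell tests under a path prefix:
#   xpcshell = list(metadata.resolve_tests(paths=['netwerk/test/unit'],
#                                          flavor='xpcshell'))
#   # Filters compose: restrict by subsuite and tag as well.
#   tagged = list(metadata.resolve_tests(flavor='mochitest',
#                                        subsuite='media', tags=['webrtc']))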
class FasterMakeBackend(CommonBackend, PartialBackend): def _init(self): super(FasterMakeBackend, self)._init() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._has_xpidl = False def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] if target.endswith('.css'): kwargs['marker'] = '%' depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \ obj.install_target.startswith('dist/bin'): defines = obj.defines or {} if defines: defines = defines.defines for path, files in obj.files.walk(): for f in files: if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, f.full_path, path, target=f.target_basename, defines=defines) elif '*' in f: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) if '*' in f.target_basename: target = path else: target = mozpath.join(path, f.target_basename) self._install_manifests[obj.install_target] \ .add_pattern_link( prefix, f.full_path[len(prefix):], target) else: self._install_manifests[obj.install_target].add_link( f.full_path, mozpath.join(path, f.target_basename) ) if isinstance(f, ObjDirPath): dep_target = 'install-%s' % obj.install_target self._dependencies[dep_target].append( mozpath.relpath(f.full_path, self.environment.topobjdir)) elif isinstance(obj, ChromeManifestEntry) and \ obj.install_target.startswith('dist/bin'): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath(obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, XPIDLFile): self._has_xpidl = True # We're not actually handling XPIDL files. return False else: return False return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) if not self._has_xpidl: mk.add_statement('NO_XPIDL = 1') # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): value = self.environment.substs.get(var) if value is not None: mk.add_statement('%s = %s' % (var, value)) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( ''.join('%s\n' % e for e in sorted(entries)), mozpath.relpath(target, install_target)) # Add information for install manifests.
mk.add_statement('INSTALL_MANIFESTS = %s' % ' '.join(self._install_manifests.keys())) # Add dependencies we inferred: for target, deps in self._dependencies.iteritems(): mk.create_rule([target]).add_dependencies( '$(TOPOBJDIR)/%s' % d for d in deps) mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk') for base, install_manifest in self._install_manifests.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'install_%s' % base.replace('/', '_'))) as fh: install_manifest.write(fileobj=fh) # For artifact builds only, write a single unified manifest for consumption by |mach watch|. if self.environment.is_artifact_build: unified_manifest = InstallManifest() for base, install_manifest in self._install_manifests.iteritems(): # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash. assert base.startswith('dist/bin') base = base[len('dist/bin'):] if base and base[0] == '/': base = base[1:] unified_manifest.add_entries_from(install_manifest, base=base) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'unified_install_dist_bin')) as fh: unified_manifest.write(fileobj=fh) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'Makefile')) as fh: mk.dump(fh, removal_guard=False)
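# Illustrative sketch of the Makefile that consume_finished() above writes:
# a thin driver that defines variables and defers all the logic to
# config/faster/rules.mk. A standalone toy using the same mozbuild.makeutil
# API; the paths and manifest names are made up.
from StringIO import StringIO  # Python 2, matching this codebase

from mozbuild.makeutil import Makefile as _SketchMakefile

_mk = _SketchMakefile()
_mk.create_rule(['default'])  # first rule == default make target
_mk.add_statement('INSTALL_MANIFESTS = dist/bin')
# Generated files become 'install-<target>' dependencies so installs re-run
# when their objdir inputs change.
_mk.create_rule(['install-dist/bin']).add_dependencies(
    ['$(TOPOBJDIR)/toolkit/content/buildconfig.html'])
_mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
_fh = StringIO()
_mk.dump(_fh, removal_guard=False)
# _fh.getvalue() now holds the make syntax written to faster/Makefile.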
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._seen_directories = set() self._defines = dict() self._manifest_entries = OrderedDefaultDict(set) self._install_manifests = OrderedDefaultDict(InstallManifest) self._dependencies = OrderedDefaultDict(list) self._has_xpidl = False def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if not isinstance(obj, Defines) and isinstance(obj, ContextDerived): defines = self._defines.get(obj.objdir, {}) if defines: defines = defines.defines if isinstance(obj, Defines): self._defines[obj.objdir] = obj # We're assuming below that Defines come first for a given objdir, # which is kind of set in stone from the order things are treated # in emitter.py. assert obj.objdir not in self._seen_directories elif isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj, defines) elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \ obj.install_target.startswith('dist/bin'): for path, files in obj.files.walk(): for f in files: if isinstance(obj, FinalTargetPreprocessedFiles): self._add_preprocess(obj, f.full_path, path, defines=defines) else: self._install_manifests[obj.install_target].add_symlink( f.full_path, mozpath.join(path, mozpath.basename(f)) ) elif isinstance(obj, ChromeManifestEntry) and \ obj.install_target.startswith('dist/bin'): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath(obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, XPIDLFile): self._has_xpidl = True # XPIDL are emitted before Defines, which breaks the assert in the # branch for Defines. OTOH, we don't actually care about the # XPIDLFile objects just yet, so we can just pretend we didn't see # an object in the directory yet. return True else: # We currently ignore a lot of object types, so just acknowledge # everything. return True self._seen_directories.add(obj.objdir) return True def _consume_jar_manifest(self, obj, defines): # Ideally, this would all be handled somehow in the emitter, but # this would require all the magic surrounding l10n and addons in # the recursive make backend to die, which is not going to happen # any time soon enough. # Notably missing: # - DEFINES from config/config.mk # - L10n support # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in # moz.build, but it doesn't matter in dist/bin. 
pp = Preprocessor() pp.context.update(defines) pp.context.update(self.environment.defines) pp.context.update( AB_CD='en-US', BUILD_FASTER=1, ) pp.out = JarManifestParser() pp.do_include(obj.path) self.backend_input_files |= pp.includes for jarinfo in pp.out: install_target = obj.install_target if jarinfo.base: install_target = mozpath.normpath( mozpath.join(install_target, jarinfo.base)) for e in jarinfo.entries: if e.is_locale: if jarinfo.relativesrcdir: path = mozpath.join(self.environment.topsrcdir, jarinfo.relativesrcdir) else: path = mozpath.dirname(obj.path) src = mozpath.join( path, 'en-US', e.source) elif e.source.startswith('/'): src = mozpath.join(self.environment.topsrcdir, e.source[1:]) else: src = mozpath.join(mozpath.dirname(obj.path), e.source) if '*' in e.source: if e.preprocess: raise Exception('%s: Wildcards are not supported with ' 'preprocessing' % obj.path) def _prefix(s): for p in s.split('/'): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(src)) self._install_manifests[install_target] \ .add_pattern_symlink( prefix, src[len(prefix):], mozpath.join(jarinfo.name, e.output)) continue if not os.path.exists(src): if e.is_locale: raise Exception( '%s: Cannot find %s' % (obj.path, e.source)) if e.source.startswith('/'): src = mozpath.join(self.environment.topobjdir, e.source[1:]) else: # This actually gets awkward if the jar.mn is not # in the same directory as the moz.build declaring # it, but it's how it works in the recursive make, # not that anything relies on that, but it's simpler. src = mozpath.join(obj.objdir, e.source) self._dependencies['install-%s' % install_target] \ .append(mozpath.relpath( src, self.environment.topobjdir)) if e.preprocess: kwargs = {} if src.endswith('.css'): kwargs['marker'] = '%' self._add_preprocess( obj, src, mozpath.join(jarinfo.name, mozpath.dirname(e.output)), mozpath.basename(e.output), defines=defines, **kwargs) else: self._install_manifests[install_target].add_symlink( src, mozpath.join(jarinfo.name, e.output)) manifest = mozpath.normpath(mozpath.join(install_target, jarinfo.name)) manifest += '.manifest' for m in jarinfo.chrome_manifests: self._manifest_entries[manifest].add( m.replace('%', mozpath.basename(jarinfo.name) + '/')) if jarinfo.name != 'chrome': manifest = mozpath.normpath(mozpath.join(install_target, 'chrome.manifest')) entry = 'manifest %s.manifest' % jarinfo.name self._manifest_entries[manifest].add(entry) def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) mk.add_statement('BACKEND = %s' % self._backend_output_list_file) if not self._has_xpidl: mk.add_statement('NO_XPIDL = 1') # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): mk.add_statement('%s = %s' % (var, self.environment.substs[var])) install_manifests_bases = self._install_manifests.keys() # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) install_target = mozpath.basedir(target, install_manifests_bases) self._install_manifests[install_target].add_content( ''.join('%s\n' % e for e in sorted(entries)), mozpath.relpath(target, install_target)) # Add information for install manifests. 
mk.add_statement('INSTALL_MANIFESTS = %s' % ' '.join(self._install_manifests.keys())) # Add dependencies we inferred: for target, deps in self._dependencies.iteritems(): mk.create_rule([target]).add_dependencies( '$(TOPOBJDIR)/%s' % d for d in deps) # Add backend dependencies: mk.create_rule([self._backend_output_list_file]).add_dependencies( self.backend_input_files) mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk') for base, install_manifest in self._install_manifests.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'install_%s' % base.replace('/', '_'))) as fh: install_manifest.write(fileobj=fh) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'Makefile')) as fh: mk.dump(fh, removal_guard=False)
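# Sketch of the depfile naming used by _add_preprocess() above: the install
# destination is flattened into a single filename under faster/.deps so
# every preprocessed target gets a unique dependency file. A standalone toy
# mirroring the expression in the method; the example paths are
# illustrative.
import mozpack.path as _mozpath

def _sketch_depfile(topobjdir, install_target, dest, target):
    return _mozpath.join(
        topobjdir, 'faster', '.deps',
        _mozpath.join(install_target, dest, target).replace('/', '_'))

# _sketch_depfile('/obj', 'dist/bin', 'defaults/pref', 'prefs.js')
# -> '/obj/faster/.deps/dist_bin_defaults_pref_prefs.js'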
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._seen_directories = set() self._defines = dict() self._manifest_entries = OrderedDefaultDict(list) self._install_manifests = OrderedDefaultDict(OverwriteInstallManifest) self._dependencies = OrderedDefaultDict(list) def _add_preprocess(self, obj, path, dest, target=None, **kwargs): if target is None: target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if not isinstance(obj, Defines) and isinstance(obj, ContextDerived): defines = self._defines.get(obj.objdir, {}) if defines: defines = defines.defines if isinstance(obj, Defines): self._defines[obj.objdir] = obj # We're assuming below that Defines come first for a given objdir, # which is kind of set in stone from the order things are treated # in emitter.py. assert obj.objdir not in self._seen_directories elif isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): self._consume_jar_manifest(obj, defines) elif isinstance(obj, VariablePassthru) and \ obj.install_target.startswith('dist/bin'): for f in obj.variables.get('EXTRA_COMPONENTS', {}): path = mozpath.join(obj.install_target, 'components', mozpath.basename(f)) self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join('components', mozpath.basename(f)) ) if f.endswith('.manifest'): manifest = mozpath.join(obj.install_target, 'chrome.manifest') self._manifest_entries[manifest].append( 'manifest components/%s' % mozpath.basename(f)) for f in obj.variables.get('EXTRA_PP_COMPONENTS', {}): self._add_preprocess(obj, f, 'components', defines=defines) if f.endswith('.manifest'): manifest = mozpath.join(obj.install_target, 'chrome.manifest') self._manifest_entries[manifest].append( 'manifest components/%s' % mozpath.basename(f)) elif isinstance(obj, JavaScriptModules) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.modules.walk(): base = mozpath.join('modules', path) for f in strings: if obj.flavor == 'extra': self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(base, mozpath.basename(f)) ) elif obj.flavor == 'extra_pp': self._add_preprocess(obj, f, base, defines=defines) elif isinstance(obj, JsPreferenceFile) and \ obj.install_target.startswith('dist/bin'): # The condition for the directory value in config/rules.mk is: # ifneq (,$(DIST_SUBDIR)$(XPI_NAME)) # - when XPI_NAME is set, obj.install_target will start with # dist/xpi-stage # - when DIST_SUBDIR is set, obj.install_target will start with # dist/bin/$(DIST_SUBDIR) # So an equivalent condition that is not cumbersome for us and that # is enough at least for now is checking if obj.install_target is # different from dist/bin. if obj.install_target == 'dist/bin': pref_dir = 'defaults/pref' else: pref_dir = 'defaults/preferences' dest = mozpath.join(obj.install_target, pref_dir, mozpath.basename(obj.path)) # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. 
self._add_preprocess(obj, obj.path, pref_dir, defines=defines, silence_missing_directive_warnings=True) elif isinstance(obj, Resources) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.resources.walk(): base = mozpath.join('res', path) for f in strings: flags = strings.flags_for(f) if flags and flags.preprocess: self._add_preprocess(obj, f, base, marker='%', defines=obj.defines) else: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(base, mozpath.basename(f)) ) elif isinstance(obj, FinalTargetFiles) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.files.walk(): base = mozpath.join(obj.install_target, path) for f in strings: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(path, mozpath.basename(f)) ) elif isinstance(obj, DistFiles) and \ obj.install_target.startswith('dist/bin'): # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. for f in obj.files: self._add_preprocess(obj, f, '', defines=defines, silence_missing_directive_warnings=True) else: # We currently ignore a lot of object types, so just acknowledge # everything. return True self._seen_directories.add(obj.objdir) return True def _consume_jar_manifest(self, obj, defines): # Ideally, this would all be handled somehow in the emitter, but # this would require all the magic surrounding l10n and addons in # the recursive make backend to die, which is not going to happen # any time soon enough. # Notably missing: # - DEFINES from config/config.mk # - L10n support # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in # moz.build, but it doesn't matter in dist/bin. pp = Preprocessor() pp.context.update(defines) pp.context.update(self.environment.defines) pp.context.update( AB_CD='en-US', BUILD_FASTER=1, ) pp.out = JarManifestParser() pp.do_include(obj.path) for jarinfo in pp.out: install_target = obj.install_target # Bug 1150417 added some gross hacks, which we don't try to # support generically. Fortunately, the hacks don't define more # than chrome manifest entries, so just assume we don't get # any installation entries. if jarinfo.name.startswith('../'): assert not jarinfo.entries base = mozpath.join('chrome', jarinfo.name) for e in jarinfo.entries: if e.is_locale: src = mozpath.join( jarinfo.relativesrcdir or mozpath.dirname(obj.path), 'en-US', e.source) elif e.source.startswith('/'): src = mozpath.join(self.environment.topsrcdir, e.source[1:]) else: src = mozpath.join(mozpath.dirname(obj.path), e.source) if '*' in e.source: if e.preprocess: raise Exception('%s: Wildcards are not supported with ' 'preprocessing' % obj.path) def _prefix(s): for p in s.split('/'): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(src)) self._install_manifests[obj.install_target] \ .add_pattern_symlink( prefix, src[len(prefix):], mozpath.join(base, e.output)) continue if not os.path.exists(src): if e.is_locale: raise Exception( '%s: Cannot find %s' % (obj.path, e.source)) if e.source.startswith('/'): src = mozpath.join(self.environment.topobjdir, e.source[1:]) else: # This actually gets awkward if the jar.mn is not # in the same directory as the moz.build declaring # it, but it's how it works in the recursive make, # not that anything relies on that, but it's simpler. 
src = mozpath.join(obj.objdir, e.source) self._dependencies['install-%s' % obj.install_target] \ .append(mozpath.relpath( src, self.environment.topobjdir)) if e.preprocess: kwargs = {} if src.endswith('.css'): kwargs['marker'] = '%' self._add_preprocess( obj, src, mozpath.join(base, mozpath.dirname(e.output)), mozpath.basename(e.output), defines=defines, **kwargs) else: self._install_manifests[obj.install_target].add_symlink( src, mozpath.join(base, e.output)) manifest = mozpath.normpath(mozpath.join(obj.install_target, base)) manifest += '.manifest' for m in jarinfo.chrome_manifests: self._manifest_entries[manifest].append( m.replace('%', jarinfo.name + '/')) # ../ special cased for bug 1150417 again. if not jarinfo.name.startswith('../'): manifest = mozpath.normpath(mozpath.join(obj.install_target, 'chrome.manifest')) entry = 'manifest %s.manifest' % base if entry not in self._manifest_entries[manifest]: self._manifest_entries[manifest].append(entry) def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_BUILD_APP', 'MOZ_WIDGET_TOOLKIT', ): mk.add_statement('%s = %s' % (var, self.environment.substs[var])) # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) target = '$(TOPOBJDIR)/%s' % target mk.create_rule([target]).add_dependencies( ['content = %s' % ' '.join('"%s"' % e for e in entries)]) mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets)) # Add information for install manifests. mk.add_statement('INSTALL_MANIFESTS = %s' % ' '.join(self._install_manifests.keys())) # Add dependencies we infered: for target, deps in self._dependencies.iteritems(): mk.create_rule([target]).add_dependencies( '$(TOPOBJDIR)/%s' % d for d in deps) mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk') for base, install_manifest in self._install_manifests.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'install_%s' % base.replace('/', '_'))) as fh: install_manifest.write(fileobj=fh) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'Makefile')) as fh: mk.dump(fh, removal_guard=False)
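# Sketch of the wildcard-prefix computation used by the local _prefix()
# helpers above: peel off the leading path components that contain no '*'
# so a pattern can be split into (base, pattern) for add_pattern_symlink().
# A standalone toy; the real helpers assume the wildcard sits in the
# trailing component(s), which this version makes explicit by stopping at
# the first wildcard.
def _sketch_wildcard_prefix(path):
    parts = []
    for p in path.split('/'):
        if '*' in p:
            break
        parts.append(p + '/')
    return ''.join(parts)

# _sketch_wildcard_prefix('chrome/en-US/locale/*.dtd')
# -> 'chrome/en-US/locale/'
# The pattern handed to the install manifest is then path[len(prefix):].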
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._seen_directories = set() self._defines = dict() self._jar_manifests = OrderedDict() self._preprocess_files = OrderedDict() self._manifest_entries = OrderedDefaultDict(list) self._install_manifests = OrderedDefaultDict(InstallManifest) def consume_object(self, obj): if not isinstance(obj, Defines) and isinstance(obj, ContextDerived): defines = self._defines.get(obj.objdir, []) if defines: defines = list(defines.get_defines()) if isinstance(obj, Defines): self._defines[obj.objdir] = obj # We're assuming below that Defines come first for a given objdir, # which is kind of set in stone from the order things are treated # in emitter.py. assert obj.objdir not in self._seen_directories elif isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"): self._jar_manifests[obj.path] = (obj.objdir, obj.install_target, defines) elif isinstance(obj, VariablePassthru) and obj.install_target.startswith("dist/bin"): for f in obj.variables.get("EXTRA_COMPONENTS", {}): path = mozpath.join(obj.install_target, "components", mozpath.basename(f)) self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join("components", mozpath.basename(f)) ) if f.endswith(".manifest"): manifest = mozpath.join(obj.install_target, "chrome.manifest") self._manifest_entries[manifest].append("manifest components/%s" % mozpath.basename(f)) for f in obj.variables.get("EXTRA_PP_COMPONENTS", {}): path = mozpath.join(obj.install_target, "components", mozpath.basename(f)) self._preprocess_files[path] = (obj.srcdir, f, defines) if f.endswith(".manifest"): manifest = mozpath.join(obj.install_target, "chrome.manifest") self._manifest_entries[manifest].append("manifest components/%s" % mozpath.basename(f)) elif isinstance(obj, JavaScriptModules) and obj.install_target.startswith("dist/bin"): for path, strings in obj.modules.walk(): base = mozpath.join(obj.install_target, "modules", path) for f in strings: if obj.flavor == "extra": self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join("modules", path, mozpath.basename(f)) ) elif obj.flavor == "extra_pp": dest = mozpath.join(base, mozpath.basename(f)) self._preprocess_files[dest] = (obj.srcdir, f, defines) elif isinstance(obj, JsPreferenceFile) and obj.install_target.startswith("dist/bin"): # The condition for the directory value in config/rules.mk is: # ifneq (,$(DIST_SUBDIR)$(XPI_NAME)$(LIBXUL_SDK)) # - LIBXUL_SDK is not supported (it likely doesn't work in the # recursive backend anyways # - when XPI_NAME is set, obj.install_target will start with # dist/xpi-stage # - when DIST_SUBDIR is set, obj.install_target will start with # dist/bin/$(DIST_SUBDIR) # So an equivalent condition that is not cumbersome for us and that # is enough at least for now is checking if obj.install_target is # different from dist/bin. if obj.install_target == "dist/bin": pref_dir = "defaults/pref" else: pref_dir = "defaults/preferences" dest = mozpath.join(obj.install_target, pref_dir, mozpath.basename(obj.path)) # on win32, pref files need CRLF line endings... see bug 206029 if self.environment.substs["OS_ARCH"] == "WINNT": defines.append("--line-endings=crlf") # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. 
defines.append("--silence-missing-directive-warnings") self._preprocess_files[dest] = (obj.srcdir, obj.path, defines) elif isinstance(obj, Resources) and obj.install_target.startswith("dist/bin"): for path, strings in obj.resources.walk(): base = mozpath.join(obj.install_target, "res", path) for f in strings: flags = strings.flags_for(f) if flags and flags.preprocess: dest = mozpath.join(base, mozpath.basename(f)) defines = Defines(obj._context, obj.defines) defines = list(defines.get_defines()) defines.extend(["--marker", "%"]) self._preprocess_files[dest] = (obj.srcdir, f, defines) else: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join("res", path, mozpath.basename(f)) ) elif isinstance(obj, FinalTargetFiles) and obj.install_target.startswith("dist/bin"): for path, strings in obj.files.walk(): base = mozpath.join(obj.install_target, path) for f in strings: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(path, mozpath.basename(f)) ) elif isinstance(obj, DistFiles) and obj.install_target.startswith("dist/bin"): # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. defines.append("--silence-missing-directive-warnings") for f in obj.files: dest = mozpath.join(obj.install_target, mozpath.basename(f)) self._preprocess_files[dest] = (obj.srcdir, f, defines) else: # We currently ignore a lot of object types, so just acknowledge # everything. return True self._seen_directories.add(obj.objdir) return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(["default"]) mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir) mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir) # Add a few necessary variables inherited from configure for var in ("PYTHON", "ACDEFINES", "MOZ_CHROME_FILE_FORMAT"): mk.add_statement("%s = %s" % (var, self.environment.substs[var])) # Add all necessary information for jar manifest processing jar_mn_targets = [] for path, (objdir, install_target, defines) in self._jar_manifests.iteritems(): rel_manifest = mozpath.relpath(path, self.environment.topsrcdir) target = rel_manifest.replace("/", "-") assert target not in jar_mn_targets jar_mn_targets.append(target) target = "jar-%s" % target mk.create_rule([target]).add_dependencies([path]) if objdir != mozpath.join(self.environment.topobjdir, mozpath.dirname(rel_manifest)): mk.create_rule([target]).add_dependencies(["objdir = %s" % objdir]) if install_target != "dist/bin": mk.create_rule([target]).add_dependencies(["install_target = %s" % install_target]) if defines: mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)]) mk.add_statement("JAR_MN_TARGETS = %s" % " ".join(jar_mn_targets)) # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) target = "$(TOPOBJDIR)/%s" % target mk.create_rule([target]).add_dependencies(["content = %s" % " ".join('"%s"' % e for e in entries)]) mk.add_statement("MANIFEST_TARGETS = %s" % " ".join(manifest_targets)) # Add information for preprocessed files. preprocess_targets = [] for target, (srcdir, f, defines) in self._preprocess_files.iteritems(): # This matches what PP_TARGETS do in config/rules. if target.endswith(".in"): target = target[:-3] # PP_TARGETS assumes this is true, but doesn't enforce it. 
assert target not in self._preprocess_files preprocess_targets.append(target) target = "$(TOPOBJDIR)/%s" % target mk.create_rule([target]).add_dependencies([mozpath.join(srcdir, f)]) if defines: mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)]) mk.add_statement("PP_TARGETS = %s" % " ".join(preprocess_targets)) # Add information for install manifests. mk.add_statement("INSTALL_MANIFESTS = %s" % " ".join(self._install_manifests.keys())) mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk") for base, install_manifest in self._install_manifests.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, "faster", "install_%s" % base.replace("/", "_")) ) as fh: install_manifest.write(fileobj=fh) with self._write_file(mozpath.join(self.environment.topobjdir, "faster", "Makefile")) as fh: mk.dump(fh, removal_guard=False)
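# Sketch: the chrome-manifest rules above smuggle the manifest's content to
# config/faster/rules.mk as a fake 'content = ...' dependency, one quoted
# entry per manifest line. A standalone toy with made-up entries, using the
# same mozbuild.makeutil calls as the backend:
from mozbuild.makeutil import Makefile as _ManifestSketchMakefile

_smk = _ManifestSketchMakefile()
_target = '$(TOPOBJDIR)/dist/bin/chrome.manifest'
_entries = ['manifest components/foo.manifest',
            'manifest chrome/toolkit.manifest']
_smk.create_rule([_target]).add_dependencies(
    ['content = %s' % ' '.join('"%s"' % e for e in _entries)])
_smk.add_statement('MANIFEST_TARGETS = %s' % _target)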
class FasterMakeBackend(CommonBackend): def _init(self): super(FasterMakeBackend, self)._init() self._seen_directories = set() self._defines = dict() self._jar_manifests = OrderedDict() self._manifest_entries = OrderedDefaultDict(list) self._install_manifests = OrderedDefaultDict(InstallManifest) def _add_preprocess(self, obj, path, dest, **kwargs): target = mozpath.basename(path) # This matches what PP_TARGETS do in config/rules. if target.endswith('.in'): target = target[:-3] depfile = mozpath.join( self.environment.topobjdir, 'faster', '.deps', mozpath.join(obj.install_target, dest, target).replace('/', '_')) self._install_manifests[obj.install_target].add_preprocess( mozpath.join(obj.srcdir, path), mozpath.join(dest, target), depfile, **kwargs) def consume_object(self, obj): if not isinstance(obj, Defines) and isinstance(obj, ContextDerived): defines = self._defines.get(obj.objdir, {}) if defines: defines = defines.defines if isinstance(obj, Defines): self._defines[obj.objdir] = obj # We're assuming below that Defines come first for a given objdir, # which is kind of set in stone from the order things are treated # in emitter.py. assert obj.objdir not in self._seen_directories elif isinstance(obj, JARManifest) and \ obj.install_target.startswith('dist/bin'): defines = self._defines.get(obj.objdir, []) if defines: defines = list(defines.get_defines()) self._jar_manifests[obj.path] = (obj.objdir, obj.install_target, defines) elif isinstance(obj, VariablePassthru) and \ obj.install_target.startswith('dist/bin'): for f in obj.variables.get('EXTRA_COMPONENTS', {}): path = mozpath.join(obj.install_target, 'components', mozpath.basename(f)) self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join('components', mozpath.basename(f)) ) if f.endswith('.manifest'): manifest = mozpath.join(obj.install_target, 'chrome.manifest') self._manifest_entries[manifest].append( 'manifest components/%s' % mozpath.basename(f)) for f in obj.variables.get('EXTRA_PP_COMPONENTS', {}): self._add_preprocess(obj, f, 'components', defines=defines) if f.endswith('.manifest'): manifest = mozpath.join(obj.install_target, 'chrome.manifest') self._manifest_entries[manifest].append( 'manifest components/%s' % mozpath.basename(f)) elif isinstance(obj, JavaScriptModules) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.modules.walk(): base = mozpath.join('modules', path) for f in strings: if obj.flavor == 'extra': self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(base, mozpath.basename(f)) ) elif obj.flavor == 'extra_pp': self._add_preprocess(obj, f, base, defines=defines) elif isinstance(obj, JsPreferenceFile) and \ obj.install_target.startswith('dist/bin'): # The condition for the directory value in config/rules.mk is: # ifneq (,$(DIST_SUBDIR)$(XPI_NAME)$(LIBXUL_SDK)) # - LIBXUL_SDK is not supported (it likely doesn't work in the # recursive backend anyways # - when XPI_NAME is set, obj.install_target will start with # dist/xpi-stage # - when DIST_SUBDIR is set, obj.install_target will start with # dist/bin/$(DIST_SUBDIR) # So an equivalent condition that is not cumbersome for us and that # is enough at least for now is checking if obj.install_target is # different from dist/bin. 
if obj.install_target == 'dist/bin': pref_dir = 'defaults/pref' else: pref_dir = 'defaults/preferences' dest = mozpath.join(obj.install_target, pref_dir, mozpath.basename(obj.path)) # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. self._add_preprocess(obj, obj.path, pref_dir, defines=defines, silence_missing_directive_warnings=True) elif isinstance(obj, Resources) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.resources.walk(): base = mozpath.join('res', path) for f in strings: flags = strings.flags_for(f) if flags and flags.preprocess: self._add_preprocess(obj, f, base, marker='%', defines=obj.defines) else: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(base, mozpath.basename(f)) ) elif isinstance(obj, FinalTargetFiles) and \ obj.install_target.startswith('dist/bin'): for path, strings in obj.files.walk(): base = mozpath.join(obj.install_target, path) for f in strings: self._install_manifests[obj.install_target].add_symlink( mozpath.join(obj.srcdir, f), mozpath.join(path, mozpath.basename(f)) ) elif isinstance(obj, DistFiles) and \ obj.install_target.startswith('dist/bin'): # We preprocess these, but they don't necessarily have preprocessor # directives, so tell the preprocessor to not complain about that. for f in obj.files: self._add_preprocess(obj, f, '', defines=defines, silence_missing_directive_warnings=True) else: # We currently ignore a lot of object types, so just acknowledge # everything. return True self._seen_directories.add(obj.objdir) return True def consume_finished(self): mk = Makefile() # Add the default rule at the very beginning. mk.create_rule(['default']) mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir) mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir) # Add a few necessary variables inherited from configure for var in ( 'PYTHON', 'ACDEFINES', 'MOZ_CHROME_FILE_FORMAT', ): mk.add_statement('%s = %s' % (var, self.environment.substs[var])) # Add all necessary information for jar manifest processing jar_mn_targets = [] for path, (objdir, install_target, defines) in \ self._jar_manifests.iteritems(): rel_manifest = mozpath.relpath(path, self.environment.topsrcdir) target = rel_manifest.replace('/', '-') assert target not in jar_mn_targets jar_mn_targets.append(target) target = 'jar-%s' % target mk.create_rule([target]).add_dependencies([path]) if objdir != mozpath.join(self.environment.topobjdir, mozpath.dirname(rel_manifest)): mk.create_rule([target]).add_dependencies( ['objdir = %s' % objdir]) if install_target != 'dist/bin': mk.create_rule([target]).add_dependencies( ['install_target = %s' % install_target]) if defines: mk.create_rule([target]).add_dependencies( ['defines = %s' % ' '.join(defines)]) mk.add_statement('JAR_MN_TARGETS = %s' % ' '.join(jar_mn_targets)) # Add information for chrome manifest generation manifest_targets = [] for target, entries in self._manifest_entries.iteritems(): manifest_targets.append(target) target = '$(TOPOBJDIR)/%s' % target mk.create_rule([target]).add_dependencies( ['content = %s' % ' '.join('"%s"' % e for e in entries)]) mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets)) # Add information for install manifests. 
mk.add_statement('INSTALL_MANIFESTS = %s' % ' '.join(self._install_manifests.keys())) mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk') for base, install_manifest in self._install_manifests.iteritems(): with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'install_%s' % base.replace('/', '_'))) as fh: install_manifest.write(fileobj=fh) with self._write_file( mozpath.join(self.environment.topobjdir, 'faster', 'Makefile')) as fh: mk.dump(fh, removal_guard=False)
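# For reference, a sketch of the Tuprules.tup that the TupOnly backend
# below writes (values are illustrative, based on the fh.write() calls in
# its consume_finished()):
#
#   MOZ_OBJ_ROOT = $(TUP_CWD)
#   DIST = $(MOZ_OBJ_ROOT)/dist
#   ACDEFINES = -DMOZILLA_VERSION=... -DAB_CD=en-US -DBUILD_FASTER=1
#   topsrcdir = $(MOZ_OBJ_ROOT)/..
#   PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B
#   PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py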
class TupOnly(CommonBackend, PartialBackend): """Backend that generates Tupfiles for the tup build system. """ def _init(self): CommonBackend._init(self) self._backend_files = {} self._cmd = MozbuildObject.from_environment() self._manifest_entries = OrderedDefaultDict(set) self._compile_env_gen_files = ( '*.c', '*.cpp', '*.h', '*.inc', '*.py', '*.rs', ) # These are 'group' dependencies - All rules that list these as an output # will be built before any rules that list this as an input. self._installed_idls = '$(MOZ_OBJ_ROOT)/<installed-idls>' self._installed_files = '$(MOZ_OBJ_ROOT)/<installed-files>' # The preprocessor including source-repo.h and buildid.h creates # dependencies that aren't specified by moz.build and cause errors # in Tup. Express these as a group dependency. self._early_generated_files = '$(MOZ_OBJ_ROOT)/<early-generated-files>' def _get_backend_file(self, relobjdir): objdir = mozpath.normpath(mozpath.join(self.environment.topobjdir, relobjdir)) if objdir not in self._backend_files: self._backend_files[objdir] = \ BackendTupfile(objdir, self.environment, self.environment.topsrcdir, self.environment.topobjdir) return self._backend_files[objdir] def _get_backend_file_for(self, obj): return self._get_backend_file(obj.relobjdir) def _py_action(self, action): cmd = [ '$(PYTHON)', '-m', 'mozbuild.action.%s' % action, ] return cmd def consume_object(self, obj): """Write out build files necessary to build with tup.""" if not isinstance(obj, ContextDerived): return False consumed = CommonBackend.consume_object(self, obj) if consumed: return True backend_file = self._get_backend_file_for(obj) if isinstance(obj, GeneratedFile): skip_files = [] if self.environment.is_artifact_build: skip_files = self._compile_env_gen_files for f in obj.outputs: if any(mozpath.match(f, p) for p in skip_files): return False if 'application.ini.h' in obj.outputs: # application.ini.h is a special case since we need to process # the FINAL_TARGET_PP_FILES for application.ini before running # the GENERATED_FILES script, and tup doesn't handle the rules # out of order. 
backend_file.delayed_generated_files.append(obj) else: self._process_generated_file(backend_file, obj) elif (isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith('dist/bin')): top_level = mozpath.join(obj.install_target, 'chrome.manifest') if obj.path != top_level: entry = 'manifest %s' % mozpath.relpath(obj.path, obj.install_target) self._manifest_entries[top_level].add(entry) self._manifest_entries[obj.path].add(str(obj.entry)) elif isinstance(obj, Defines): self._process_defines(backend_file, obj) elif isinstance(obj, HostDefines): self._process_defines(backend_file, obj, host=True) elif isinstance(obj, FinalTargetFiles): self._process_final_target_files(obj) elif isinstance(obj, FinalTargetPreprocessedFiles): self._process_final_target_pp_files(obj, backend_file) elif isinstance(obj, JARManifest): self._consume_jar_manifest(obj) elif isinstance(obj, PerSourceFlag): backend_file.per_source_flags[obj.file_name].extend(obj.flags) elif isinstance(obj, ComputedFlags): self._process_computed_flags(obj, backend_file) elif isinstance(obj, (Sources, GeneratedSources)): backend_file.sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, HostSources): backend_file.host_sources[obj.canonical_suffix].extend(obj.files) elif isinstance(obj, VariablePassthru): backend_file.variables = obj.variables # The top-level Makefile.in still contains our driver target and some # things related to artifact builds, so as a special case ensure the # make backend generates a Makefile there. if obj.objdir == self.environment.topobjdir: return False return True def consume_finished(self): CommonBackend.consume_finished(self) # The approach here is similar to fastermake.py, but we # simply write out the resulting files here. for target, entries in self._manifest_entries.iteritems(): with self._write_file(mozpath.join(self.environment.topobjdir, target)) as fh: fh.write(''.join('%s\n' % e for e in sorted(entries))) for objdir, backend_file in sorted(self._backend_files.items()): for obj in backend_file.delayed_generated_files: self._process_generated_file(backend_file, obj) backend_file.gen_sources_rules([self._installed_files]) with self._write_file(fh=backend_file): pass with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh: acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value)) for (name, value) in sorted(self.environment.acdefines.iteritems())]) # TODO: AB_CD only exists in Makefiles at the moment. acdefines_flags += ' -DAB_CD=en-US' # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and is # only defined in browser/locales/Makefile.in acdefines_flags += ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % self.environment.topsrcdir # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in # toolkit/content/buildconfig.html acdefines_flags += ' -DBUILD_FASTER=1' fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n') fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n') fh.write('ACDEFINES = %s\n' % acdefines_flags) fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % ( os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir) )) fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n') fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n') fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n') fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n') fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n') # Run 'tup init' if necessary. 
if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init']) def _process_generated_file(self, backend_file, obj): # TODO: These are directories that don't work in the tup backend # yet, because things they depend on aren't built yet. skip_directories = ( 'layout/style/test', # HostSimplePrograms 'toolkit/library', # libxul.so ) if obj.script and obj.method and obj.relobjdir not in skip_directories: backend_file.export_shell() cmd = self._py_action('file_generate') if obj.localized: cmd.append('--locale=en-US') cmd.extend([ obj.script, obj.method, obj.outputs[0], '%s.pp' % obj.outputs[0], # deps file required ]) full_inputs = [f.full_path for f in obj.inputs] cmd.extend(full_inputs) cmd.extend(shell_quote(f) for f in obj.flags) outputs = [] outputs.extend(obj.outputs) outputs.append('%s.pp' % obj.outputs[0]) if any(f in obj.outputs for f in ('source-repo.h', 'buildid.h')): extra_outputs = [self._early_generated_files] else: extra_outputs = [self._installed_files] if obj.required_for_compile else [] full_inputs += [self._early_generated_files] backend_file.rule( display='python {script}:{method} -> [%o]'.format(script=obj.script, method=obj.method), cmd=cmd, inputs=full_inputs, outputs=outputs, extra_outputs=extra_outputs, ) def _process_defines(self, backend_file, obj, host=False): defines = list(obj.get_defines()) if defines: if host: backend_file.host_defines = defines else: backend_file.defines = defines def _process_final_target_files(self, obj): target = obj.install_target if not isinstance(obj, ObjdirFiles): path = mozpath.basedir(target, ( 'dist/bin', 'dist/xpi-stage', '_tests', 'dist/include', 'dist/sdk', )) if not path: raise Exception("Cannot install to " + target) if target.startswith('_tests'): # TODO: TEST_HARNESS_FILES present a few challenges for the tup # backend (bug 1372381). return for path, files in obj.files.walk(): for f in files: if not isinstance(f, ObjDirPath): backend_file = self._get_backend_file(mozpath.join(target, path)) if '*' in f: if f.startswith('/') or isinstance(f, AbsolutePath): basepath, wild = os.path.split(f.full_path) if '*' in basepath: raise Exception("Wildcards are only supported in the filename part of " "srcdir-relative or absolute paths.") # TODO: This is only needed for Windows, so we can # skip this for now. pass else: def _prefix(s): for p in mozpath.split(s): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(f.full_path)) self.backend_input_files.add(prefix) finder = FileFinder(prefix) for p, _ in finder.find(f.full_path[len(prefix):]): backend_file.symlink_rule(mozpath.join(prefix, p), output=mozpath.join(f.target_basename, p), output_group=self._installed_files) else: backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=self._installed_files) else: if (self.environment.is_artifact_build and any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)): # If we have an artifact build we never would have generated this file, # so do not attempt to install it. continue # We're not generating files in these directories yet, so # don't attempt to install files generated from them. 
if f.context.relobjdir not in ('layout/style/test', 'toolkit/library', 'js/src/shell'): output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path, f.target_basename) gen_backend_file = self._get_backend_file(f.context.relobjdir) gen_backend_file.symlink_rule(f.full_path, output=output, output_group=self._installed_files) def _process_final_target_pp_files(self, obj, backend_file): for i, (path, files) in enumerate(obj.files.walk()): for f in files: self._preprocess(backend_file, f.full_path, destdir=mozpath.join(self.environment.topobjdir, obj.install_target, path), target=f.target_basename) def _process_computed_flags(self, obj, backend_file): for var, flags in obj.get_flags(): backend_file.local_flags[var] = flags def _process_unified_sources(self, obj): backend_file = self._get_backend_file_for(obj) files = [f[0] for f in obj.unified_source_mapping] backend_file.sources[obj.canonical_suffix].extend(files) def _handle_idl_manager(self, manager): if self.environment.is_artifact_build: return dist_idl_backend_file = self._get_backend_file('dist/idl') for idl in manager.idls.values(): dist_idl_backend_file.symlink_rule(idl['source'], output_group=self._installed_idls) backend_file = self._get_backend_file('xpcom/xpidl') backend_file.export_shell() for module, data in sorted(manager.modules.iteritems()): dest, idls = data cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '-I$(IDL_PARSER_DIR)', '-I$(IDL_PARSER_CACHE_DIR)', '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py', '--cache-dir', '$(IDL_PARSER_CACHE_DIR)', '$(DIST)/idl', '$(DIST)/include', '$(DIST)/xpcrs', '$(MOZ_OBJ_ROOT)/%s/components' % dest, module, ] cmd.extend(sorted(idls)) outputs = ['$(MOZ_OBJ_ROOT)/%s/components/%s.xpt' % (dest, module)] outputs.extend(['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in sorted(idls)]) outputs.extend(['$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in sorted(idls)]) outputs.extend(['$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in sorted(idls)]) backend_file.rule( inputs=[ '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py', '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py', self._installed_idls, ], display='XPIDL %s' % module, cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], ) for manifest, entries in manager.interface_manifests.items(): for xpt in entries: self._manifest_entries[manifest].add('interfaces %s' % xpt) for m in manager.chrome_manifests: self._manifest_entries[m].add('manifest components/interfaces.manifest') def _preprocess(self, backend_file, input_file, destdir=None, target=None): if target is None: target = mozpath.basename(input_file) # .css files use '%' as the preprocessor marker, which must be scaped as # '%%' in the Tupfile. 
marker = '%%' if target.endswith('.css') else '#' cmd = self._py_action('preprocessor') cmd.extend([shell_quote(d) for d in backend_file.defines]) cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker]) base_input = mozpath.basename(target) if base_input.endswith('.in'): base_input = mozpath.splitext(base_input)[0] output = mozpath.join(destdir, base_input) if destdir else base_input backend_file.rule( inputs=[input_file], extra_inputs=[self._early_generated_files], display='Preprocess %o', cmd=cmd, outputs=[output], ) def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources, sorted_nonstatic_ipdl_sources, sorted_static_ipdl_sources, unified_ipdl_cppsrcs_mapping): # Preferably we wouldn't have to import ipdl, but we need to parse the # ast in order to determine the namespaces since they are used in the # header output paths. sys.path.append(mozpath.join(self.environment.topsrcdir, 'ipc', 'ipdl')) import ipdl backend_file = self._get_backend_file('ipc/ipdl') outheaderdir = '_ipdlheaders' srcdir = mozpath.join(self.environment.topsrcdir, 'ipc/ipdl') cmd = [ '$(PYTHON_PATH)', '$(PLY_INCLUDE)', '%s/ipdl.py' % srcdir, '--sync-msg-list=%s/sync-messages.ini' % srcdir, '--msg-metadata=%s/message-metadata.ini' % srcdir, '--outheaders-dir=%s' % outheaderdir, '--outcpp-dir=.', ] ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources)) cmd.extend(['-I%s' % d for d in ipdldirs]) cmd.extend(sorted_ipdl_sources) outputs = ['IPCMessageTypeName.cpp', mozpath.join(outheaderdir, 'IPCMessageStart.h'), 'ipdl_lextab.py', 'ipdl_yacctab.py'] for filename in sorted_ipdl_sources: filepath, ext = os.path.splitext(filename) dirname, basename = os.path.split(filepath) dirname = mozpath.relpath(dirname, self.environment.topsrcdir) extensions = [''] if ext == '.ipdl': extensions.extend(['Child', 'Parent']) with open(filename) as f: ast = ipdl.parse(f.read(), filename, includedirs=ipdldirs) self.backend_input_files.add(filename) headerdir = os.path.join(outheaderdir, *([ns.name for ns in ast.namespaces])) for extension in extensions: outputs.append("%s%s.cpp" % (basename, extension)) outputs.append(mozpath.join(headerdir, '%s%s.h' % (basename, extension))) backend_file.rule( display='IPDL code generation', cmd=cmd, outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_ipdl_cppsrcs_mapping) def _handle_webidl_build(self, bindings_dir, unified_source_mapping, webidls, expected_build_output_files, global_define_files): backend_file = self._get_backend_file('dom/bindings') backend_file.export_shell() for source in sorted(webidls.all_preprocessed_sources()): self._preprocess(backend_file, source) cmd = self._py_action('webidl') cmd.append(mozpath.join(self.environment.topsrcdir, 'dom', 'bindings')) # The WebIDLCodegenManager knows all of the .cpp and .h files that will # be created (expected_build_output_files), but there are a few # additional files that are also created by the webidl py_action. 
outputs = [ '_cache/webidlyacc.py', 'codegen.json', 'codegen.pp', 'parser.out', ] outputs.extend(expected_build_output_files) backend_file.rule( display='WebIDL code generation', cmd=cmd, inputs=webidls.all_non_static_basenames(), outputs=outputs, extra_outputs=[self._installed_files], check_unchanged=True, ) backend_file.sources['.cpp'].extend(u[0] for u in unified_source_mapping) backend_file.sources['.cpp'].extend(sorted(global_define_files)) test_backend_file = self._get_backend_file('dom/bindings/test') test_backend_file.sources['.cpp'].extend(sorted('../%sBinding.cpp' % s for s in webidls.all_test_stems()))
class TreeMetadataEmitter(LoggingMixin): """Converts the executed mozbuild files into data structures. This is a bridge between reader.py and data.py. It takes what was read by reader.BuildReader and converts it into the classes defined in the data module. """ def __init__(self, config): self.populate_logger() self.config = config mozinfo.find_and_update_from_json(config.topobjdir) # Python 2.6 doesn't allow unicode keys to be used for keyword # arguments. This gross hack works around the problem until we # rid ourselves of 2.6. self.info = {} for k, v in mozinfo.info.items(): if isinstance(k, unicode): k = k.encode('ascii') self.info[k] = v self._libs = OrderedDefaultDict(list) self._binaries = OrderedDict() self._linkage = [] self._static_linking_shared = set() # Keep track of external paths (third party build systems), starting # from what we run a subconfigure in. We'll eliminate some directories # as we traverse them with moz.build (e.g. js/src). subconfigures = os.path.join(self.config.topobjdir, 'subconfigures') paths = [] if os.path.exists(subconfigures): paths = open(subconfigures).read().splitlines() self._external_paths = set(mozpath.normsep(d) for d in paths) # Add security/nss manually, since it doesn't have a subconfigure. self._external_paths.add('security/nss') def emit(self, output): """Convert the BuildReader output into data structures. The return value from BuildReader.read_topsrcdir() (a generator) is typically fed into this function. """ file_count = 0 sandbox_execution_time = 0.0 emitter_time = 0.0 contexts = {} def emit_objs(objs): for o in objs: yield o if not o._ack: raise Exception('Unhandled object of type %s' % type(o)) for out in output: # Nothing in sub-contexts is currently of interest to us. Filter # them all out. if isinstance(out, SubContext): continue if isinstance(out, Context): # Keep all contexts around, we will need them later. contexts[out.objdir] = out start = time.time() # We need to expand the generator for the timings to work. objs = list(self.emit_from_context(out)) emitter_time += time.time() - start for o in emit_objs(objs): yield o # Update the stats. file_count += len(out.all_paths) sandbox_execution_time += out.execution_time else: raise Exception('Unhandled output type: %s' % type(out)) # Don't emit Linkable objects when COMPILE_ENVIRONMENT is explicitely # set to a value meaning false (usually ''). if self.config.substs.get('COMPILE_ENVIRONMENT', True): start = time.time() objs = list(self._emit_libs_derived(contexts)) emitter_time += time.time() - start for o in emit_objs(objs): yield o yield ReaderSummary(file_count, sandbox_execution_time, emitter_time) def _emit_libs_derived(self, contexts): # First do FINAL_LIBRARY linkage. for lib in (l for libs in self._libs.values() for l in libs): if not isinstance(lib, StaticLibrary) or not lib.link_into: continue if lib.link_into not in self._libs: raise SandboxValidationError( 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME' % lib.link_into, contexts[lib.objdir]) candidates = self._libs[lib.link_into] # When there are multiple candidates, but all are in the same # directory and have a different type, we want all of them to # have the library linked. The typical usecase is when building # both a static and a shared library in a directory, and having # that as a FINAL_LIBRARY. 
if len(set(type(l) for l in candidates)) == len(candidates) and \ len(set(l.objdir for l in candidates)) == 1: for c in candidates: c.link_library(lib) else: raise SandboxValidationError( 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in ' 'multiple places:\n %s' % (lib.link_into, '\n '.join(l.objdir for l in candidates)), contexts[lib.objdir]) # Next, USE_LIBS linkage. for context, obj, variable in self._linkage: self._link_libraries(context, obj, variable) def recurse_refs(lib): for o in lib.refs: yield o if isinstance(o, StaticLibrary): for q in recurse_refs(o): yield q # Check that all static libraries refering shared libraries in # USE_LIBS are linked into a shared library or program. for lib in self._static_linking_shared: if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)): shared_libs = sorted(l.basename for l in lib.linked_libraries if isinstance(l, SharedLibrary)) raise SandboxValidationError( 'The static "%s" library is not used in a shared library ' 'or a program, but USE_LIBS contains the following shared ' 'library names:\n %s\n\nMaybe you can remove the ' 'static "%s" library?' % (lib.basename, '\n '.join(shared_libs), lib.basename), contexts[lib.objdir]) # Propagate LIBRARY_DEFINES to all child libraries recursively. def propagate_defines(outerlib, defines): outerlib.defines.update(defines) for lib in outerlib.linked_libraries: # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS # paths. if (isinstance(lib, StaticLibrary) and lib.link_into == outerlib.basename): propagate_defines(lib, defines) for lib in (l for libs in self._libs.values() for l in libs): if isinstance(lib, Library): propagate_defines(lib, lib.defines) yield lib for obj in self._binaries.values(): yield obj LIBRARY_NAME_VAR = { 'host': 'HOST_LIBRARY_NAME', 'target': 'LIBRARY_NAME', } def _link_libraries(self, context, obj, variable): """Add linkage declarations to a given object.""" assert isinstance(obj, Linkable) for path in context.get(variable, []): force_static = path.startswith('static:') and obj.KIND == 'target' if force_static: path = path[7:] name = mozpath.basename(path) dir = mozpath.dirname(path) candidates = [l for l in self._libs[name] if l.KIND == obj.KIND] if dir: if dir.startswith('/'): dir = mozpath.normpath( mozpath.join(obj.topobjdir, dir[1:])) else: dir = mozpath.normpath( mozpath.join(obj.objdir, dir)) dir = mozpath.relpath(dir, obj.topobjdir) candidates = [l for l in candidates if l.relobjdir == dir] if not candidates: # If the given directory is under one of the external # (third party) paths, use a fake library reference to # there. for d in self._external_paths: if dir.startswith('%s/' % d): candidates = [self._get_external_library(dir, name, force_static)] break if not candidates: raise SandboxValidationError( '%s contains "%s", but there is no "%s" %s in %s.' % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir), context) if len(candidates) > 1: # If there's more than one remaining candidate, it could be # that there are instances for the same library, in static and # shared form. libs = {} for l in candidates: key = mozpath.join(l.relobjdir, l.basename) if force_static: if isinstance(l, StaticLibrary): libs[key] = l else: if key in libs and isinstance(l, SharedLibrary): libs[key] = l if key not in libs: libs[key] = l candidates = libs.values() if force_static and not candidates: if dir: raise SandboxValidationError( '%s contains "static:%s", but there is no static ' '"%s" %s in %s.' 
% (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir), context) raise SandboxValidationError( '%s contains "static:%s", but there is no static "%s" ' '%s in the tree' % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]), context) if not candidates: raise SandboxValidationError( '%s contains "%s", which does not match any %s in the tree.' % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]), context) elif len(candidates) > 1: paths = (mozpath.join(l.relativedir, 'moz.build') for l in candidates) raise SandboxValidationError( '%s contains "%s", which matches a %s defined in multiple ' 'places:\n %s' % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND], '\n '.join(paths)), context) elif force_static and not isinstance(candidates[0], StaticLibrary): raise SandboxValidationError( '%s contains "static:%s", but there is only a shared "%s" ' 'in %s. You may want to add FORCE_STATIC_LIB=True in ' '%s/moz.build, or remove "static:".' % (variable, path, name, candidates[0].relobjdir, candidates[0].relobjdir), context) elif isinstance(obj, StaticLibrary) and isinstance(candidates[0], SharedLibrary): self._static_linking_shared.add(obj) obj.link_library(candidates[0]) # Link system libraries from OS_LIBS/HOST_OS_LIBS. for lib in context.get(variable.replace('USE', 'OS'), []): obj.link_system_library(lib) @memoize def _get_external_library(self, dir, name, force_static): # Create ExternalStaticLibrary or ExternalSharedLibrary object with a # context more or less truthful about where the external library is. context = Context(config=self.config) context.add_source(mozpath.join(self.config.topsrcdir, dir, 'dummy')) if force_static: return ExternalStaticLibrary(context, name) else: return ExternalSharedLibrary(context, name) def _handle_libraries(self, context): host_libname = context.get('HOST_LIBRARY_NAME') libname = context.get('LIBRARY_NAME') if host_libname: if host_libname == libname: raise SandboxValidationError('LIBRARY_NAME and ' 'HOST_LIBRARY_NAME must have a different value', context) lib = HostLibrary(context, host_libname) self._libs[host_libname].append(lib) self._linkage.append((context, lib, 'HOST_USE_LIBS')) final_lib = context.get('FINAL_LIBRARY') if not libname and final_lib: # If no LIBRARY_NAME is given, create one. libname = context.relsrcdir.replace('/', '_') static_lib = context.get('FORCE_STATIC_LIB') shared_lib = context.get('FORCE_SHARED_LIB') static_name = context.get('STATIC_LIBRARY_NAME') shared_name = context.get('SHARED_LIBRARY_NAME') is_framework = context.get('IS_FRAMEWORK') is_component = context.get('IS_COMPONENT') soname = context.get('SONAME') lib_defines = context.get('LIBRARY_DEFINES') shared_args = {} static_args = {} if final_lib: if static_lib: raise SandboxValidationError( 'FINAL_LIBRARY implies FORCE_STATIC_LIB. ' 'Please remove the latter.', context) if shared_lib: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. ' 'Please remove one.', context) if is_framework: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_FRAMEWORK. ' 'Please remove one.', context) if is_component: raise SandboxValidationError( 'FINAL_LIBRARY conflicts with IS_COMPONENT. ' 'Please remove one.', context) static_args['link_into'] = final_lib static_lib = True if libname: if is_component: if static_lib: raise SandboxValidationError( 'IS_COMPONENT conflicts with FORCE_STATIC_LIB. 
' 'Please remove one.', context) shared_lib = True shared_args['variant'] = SharedLibrary.COMPONENT if is_framework: if soname: raise SandboxValidationError( 'IS_FRAMEWORK conflicts with SONAME. ' 'Please remove one.', context) shared_lib = True shared_args['variant'] = SharedLibrary.FRAMEWORK if not static_lib and not shared_lib: static_lib = True if static_name: if not static_lib: raise SandboxValidationError( 'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB', context) static_args['real_name'] = static_name if shared_name: if not shared_lib: raise SandboxValidationError( 'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB', context) shared_args['real_name'] = shared_name if soname: if not shared_lib: raise SandboxValidationError( 'SONAME requires FORCE_SHARED_LIB', context) shared_args['soname'] = soname # If both a shared and a static library are created, only the # shared library is meant to be a SDK library. if context.get('SDK_LIBRARY'): if shared_lib: shared_args['is_sdk'] = True elif static_lib: static_args['is_sdk'] = True if shared_lib and static_lib: if not static_name and not shared_name: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but neither STATIC_LIBRARY_NAME or ' 'SHARED_LIBRARY_NAME is set. At least one is required.', context) if static_name and not shared_name and static_name == libname: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, ' 'and SHARED_LIBRARY_NAME is unset. Please either ' 'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set ' 'SHARED_LIBRARY_NAME.', context) if shared_name and not static_name and shared_name == libname: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, ' 'and STATIC_LIBRARY_NAME is unset. Please either ' 'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set ' 'STATIC_LIBRARY_NAME.', context) if shared_name and static_name and shared_name == static_name: raise SandboxValidationError( 'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, ' 'but SHARED_LIBRARY_NAME is the same as ' 'STATIC_LIBRARY_NAME. Please change one of them.', context) if shared_lib: lib = SharedLibrary(context, libname, **shared_args) self._libs[libname].append(lib) self._linkage.append((context, lib, 'USE_LIBS')) if static_lib: lib = StaticLibrary(context, libname, **static_args) self._libs[libname].append(lib) self._linkage.append((context, lib, 'USE_LIBS')) if lib_defines: if not libname: raise SandboxValidationError('LIBRARY_DEFINES needs a ' 'LIBRARY_NAME to take effect', context) lib.defines.update(lib_defines) def emit_from_context(self, context): """Convert a Context to tree metadata objects. This is a generator of mozbuild.frontend.data.ContextDerived instances. """ # We only want to emit an InstallationTarget if one of the consulted # variables is defined. Later on, we look up FINAL_TARGET, which has # the side-effect of populating it. So, we need to do this lookup # early. if any(k in context for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')): yield InstallationTarget(context) # We always emit a directory traversal descriptor. This is needed by # the recursive make backend. 
for o in self._emit_directory_traversal_from_context(context): yield o for path in context['CONFIGURE_SUBST_FILES']: yield self._create_substitution(ConfigFileSubstitution, context, path) for path in context['CONFIGURE_DEFINE_FILES']: yield self._create_substitution(HeaderFileSubstitution, context, path) for obj in self._process_xpidl(context): yield obj # Proxy some variables as-is until we have richer classes to represent # them. We should aim to keep this set small because it violates the # desired abstraction of the build definition away from makefiles. passthru = VariablePassthru(context) varlist = [ 'ANDROID_GENERATED_RESFILES', 'ANDROID_RES_DIRS', 'DISABLE_STL_WRAPPING', 'EXTRA_ASSEMBLER_FLAGS', 'EXTRA_COMPILE_FLAGS', 'EXTRA_COMPONENTS', 'EXTRA_DSO_LDOPTS', 'EXTRA_PP_COMPONENTS', 'FAIL_ON_WARNINGS', 'USE_STATIC_LIBS', 'IS_GYP_DIR', 'MSVC_ENABLE_PGO', 'NO_DIST_INSTALL', 'PYTHON_UNIT_TESTS', 'RCFILE', 'RESFILE', 'RCINCLUDE', 'DEFFILE', 'WIN32_EXE_LDFLAGS', 'LD_VERSION_SCRIPT', ] for v in varlist: if v in context and context[v]: passthru.variables[v] = context[v] if context.config.substs.get('OS_TARGET') == 'WINNT' and \ context['DELAYLOAD_DLLS']: context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll) for dll in context['DELAYLOAD_DLLS']]) context['OS_LIBS'].append('delayimp') for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']: if v in context and context[v]: passthru.variables['MOZBUILD_' + v] = context[v] # NO_VISIBILITY_FLAGS is slightly different if context['NO_VISIBILITY_FLAGS']: passthru.variables['VISIBILITY_FLAGS'] = '' for obj in self._process_sources(context, passthru): yield obj exports = context.get('EXPORTS') if exports: yield Exports(context, exports, dist_install=not context.get('NO_DIST_INSTALL', False)) for obj in self._process_generated_files(context): yield obj for obj in self._process_test_harness_files(context): yield obj defines = context.get('DEFINES') if defines: yield Defines(context, defines) resources = context.get('RESOURCE_FILES') if resources: yield Resources(context, resources, defines) for pref in sorted(context['JS_PREFERENCE_FILES']): yield JsPreferenceFile(context, pref) self._handle_programs(context) extra_js_modules = context.get('EXTRA_JS_MODULES') if extra_js_modules: yield JavaScriptModules(context, extra_js_modules, 'extra') extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES') if extra_pp_js_modules: yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp') test_js_modules = context.get('TESTING_JS_MODULES') if test_js_modules: yield JavaScriptModules(context, test_js_modules, 'testing') simple_lists = [ ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile), ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile), ('IPDL_SOURCES', IPDLFile), ('GENERATED_INCLUDES', GeneratedInclude), ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile), ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile), ('TEST_WEBIDL_FILES', TestWebIDLFile), ('WEBIDL_FILES', WebIDLFile), ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface), ] for context_var, klass in simple_lists: for name in context.get(context_var, []): yield klass(context, name) for local_include in context.get('LOCAL_INCLUDES', []): if local_include.startswith('/'): path = context.config.topsrcdir relative_include = local_include[1:] else: path = context.srcdir relative_include = local_include actual_include = os.path.join(path, relative_include) if not os.path.exists(actual_include): raise SandboxValidationError('Path specified in LOCAL_INCLUDES ' 'does not exist: %s 
(resolved to %s)' % (local_include, actual_include), context) yield LocalInclude(context, local_include) final_target_files = context.get('FINAL_TARGET_FILES') if final_target_files: yield FinalTargetFiles(context, final_target_files, context['FINAL_TARGET']) dist_files = context.get('DIST_FILES') if dist_files: for f in dist_files: path = os.path.join(context.srcdir, f) if not os.path.exists(path): raise SandboxValidationError('File listed in DIST_FILES ' 'does not exist: %s' % f, context) yield DistFiles(context, dist_files, context['FINAL_TARGET']) self._handle_libraries(context) for obj in self._process_test_manifests(context): yield obj for obj in self._process_jar_manifests(context): yield obj for name, jar in context.get('JAVA_JAR_TARGETS', {}).items(): yield ContextWrapped(context, jar) for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items(): yield ContextWrapped(context, data) if passthru.variables: yield passthru def _create_substitution(self, cls, context, path): if os.path.isabs(path): path = path[1:] sub = cls(context) sub.input_path = mozpath.join(context.srcdir, '%s.in' % path) sub.output_path = mozpath.join(context.objdir, path) sub.relpath = path return sub def _process_sources(self, context, passthru): for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'): for src in (context[symbol] or []): if not os.path.exists(mozpath.join(context.srcdir, src)): raise SandboxValidationError('File listed in %s does not ' 'exist: \'%s\'' % (symbol, src), context) no_pgo = context.get('NO_PGO') sources = context.get('SOURCES', []) no_pgo_sources = [f for f in sources if sources[f].no_pgo] if no_pgo: if no_pgo_sources: raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo ' 'cannot be set at the same time', context) passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo if no_pgo_sources: passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources # A map from "canonical suffixes" for a particular source file # language to the range of suffixes associated with that language. # # We deliberately don't list the canonical suffix in the suffix list # in the definition; we'll add it in programmatically after defining # things. suffix_map = { '.s': set(['.asm']), '.c': set(), '.m': set(), '.mm': set(), '.cpp': set(['.cc', '.cxx']), '.S': set(), } # The inverse of the above, mapping suffixes to their canonical suffix. canonicalized_suffix_map = {} for suffix, alternatives in suffix_map.iteritems(): alternatives.add(suffix) for a in alternatives: canonicalized_suffix_map[a] = suffix def canonical_suffix_for_file(f): return canonicalized_suffix_map[mozpath.splitext(f)[1]] # A map from moz.build variables to the canonical suffixes of file # kinds that can be listed therein. all_suffixes = list(suffix_map.keys()) varmap = dict( SOURCES=(Sources, all_suffixes), HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']), UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']), GENERATED_SOURCES=(GeneratedSources, all_suffixes), ) for variable, (klass, suffixes) in varmap.items(): allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes]) # First ensure that we haven't been given filetypes that we don't # recognize. for f in context[variable]: ext = mozpath.splitext(f)[1] if ext not in allowed_suffixes: raise SandboxValidationError( '%s has an unknown file type.' % f, context) if variable.startswith('GENERATED_'): l = passthru.variables.setdefault('GARBAGE', []) l.append(f) # Now sort the files to let groupby work. 
sorted_files = sorted(context[variable], key=canonical_suffix_for_file) for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file): arglist = [context, list(files), canonical_suffix] if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context: arglist.append(context['FILES_PER_UNIFIED_FILE']) yield klass(*arglist) sources_with_flags = [f for f in sources if sources[f].flags] for f in sources_with_flags: ext = mozpath.splitext(f)[1] yield PerSourceFlag(context, f, sources[f].flags) def _process_xpidl(self, context): # XPIDL source files get processed and turned into .h and .xpt files. # If there are multiple XPIDL files in a directory, they get linked # together into a final .xpt, which has the name defined by # XPIDL_MODULE. xpidl_module = context['XPIDL_MODULE'] if context['XPIDL_SOURCES'] and not xpidl_module: raise SandboxValidationError('XPIDL_MODULE must be defined if ' 'XPIDL_SOURCES is defined.', context) if xpidl_module and not context['XPIDL_SOURCES']: raise SandboxValidationError('XPIDL_MODULE cannot be defined ' 'unless there are XPIDL_SOURCES', context) if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']: self.log(logging.WARN, 'mozbuild_warning', dict( path=context.main_path), '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.') for idl in context['XPIDL_SOURCES']: yield XPIDLFile(context, mozpath.join(context.srcdir, idl), xpidl_module) def _process_generated_files(self, context): generated_files = context.get('GENERATED_FILES') if not generated_files: return for f in generated_files: flags = generated_files[f] output = f if flags.script: method = "main" # Deal with cases like "C:\\path\\to\\script.py:function". if not flags.script.endswith('.py') and ':' in flags.script: script, method = flags.script.rsplit(':', 1) else: script = flags.script script = mozpath.join(context.srcdir, script) inputs = [mozpath.join(context.srcdir, i) for i in flags.inputs] if not os.path.exists(script): raise SandboxValidationError( 'Script for generating %s does not exist: %s' % (f, script), context) if os.path.splitext(script)[1] != '.py': raise SandboxValidationError( 'Script for generating %s does not end in .py: %s' % (f, script), context) for i in inputs: if not os.path.exists(i): raise SandboxValidationError( 'Input for generating %s does not exist: %s' % (f, i), context) else: script = None method = None inputs = [] yield GeneratedFile(context, script, method, output, inputs) def _process_test_harness_files(self, context): test_harness_files = context.get('TEST_HARNESS_FILES') if not test_harness_files: return srcdir_files = defaultdict(list) srcdir_pattern_files = defaultdict(list) objdir_files = defaultdict(list) for path, strings in test_harness_files.walk(): if not path and strings: raise SandboxValidationError( 'Cannot install files to the root of TEST_HARNESS_FILES', context) for s in strings: if context.is_objdir_path(s): if s.startswith('!/'): objdir_files[path].append('$(DEPTH)/%s' % s[2:]) else: objdir_files[path].append(s[1:]) else: resolved = context.resolve_path(s) if '*' in s: if s[0] == '/': pattern_start = resolved.index('*') base_path = mozpath.dirname(resolved[:pattern_start]) pattern = resolved[len(base_path)+1:] else: base_path = context.srcdir pattern = s srcdir_pattern_files[path].append((base_path, pattern)); elif not os.path.exists(resolved): raise SandboxValidationError( 'File listed in TEST_HARNESS_FILES does not exist: %s' % s, context) else: srcdir_files[path].append(resolved) yield 
TestHarnessFiles(context, srcdir_files, srcdir_pattern_files, objdir_files) def _handle_programs(self, context): for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]: program = context.get(kind) if program: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" as %s name, ' 'because it is already used in %s' % (program, kind, self._binaries[program].relativedir), context) self._binaries[program] = cls(context, program) self._linkage.append((context, self._binaries[program], kind.replace('PROGRAM', 'USE_LIBS'))) for kind, cls in [ ('SIMPLE_PROGRAMS', SimpleProgram), ('CPP_UNIT_TESTS', SimpleProgram), ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]: for program in context[kind]: if program in self._binaries: raise SandboxValidationError( 'Cannot use "%s" in %s, ' 'because it is already used in %s' % (program, kind, self._binaries[program].relativedir), context) self._binaries[program] = cls(context, program, is_unit_test=kind == 'CPP_UNIT_TESTS') self._linkage.append((context, self._binaries[program], 'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS' else 'USE_LIBS')) def _process_test_manifests(self, context): # While there are multiple test manifests, the behavior is very similar # across them. We enforce this by having common handling of all # manifests and outputting a single class type with the differences # described inside the instance. # # Keys are variable prefixes and values are tuples describing how these # manifests should be handled: # # (flavor, install_prefix, package_tests) # # flavor identifies the flavor of this test. # install_prefix is the path prefix of where to install the files in # the tests directory. # package_tests indicates whether to package test files into the test # package; suites that compile the test files should not install # them into the test package. 
# test_manifests = dict( A11Y=('a11y', 'testing/mochitest', 'a11y', True), BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True), ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.', False), JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest', 'jetpack-package', True), JETPACK_ADDON=('jetpack-addon', 'testing/mochitest', 'jetpack-addon', False), METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True), MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True), MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True), MOCHITEST_WEBAPPRT_CONTENT=('webapprt-content', 'testing/mochitest', 'webapprtContent', True), MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome', 'testing/mochitest', 'webapprtChrome', True), WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True), XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True), ) for prefix, info in test_manifests.items(): for path in context.get('%s_MANIFESTS' % prefix, []): for obj in self._process_test_manifest(context, info, path): yield obj for flavor in ('crashtest', 'reftest'): for path in context.get('%s_MANIFESTS' % flavor.upper(), []): for obj in self._process_reftest_manifest(context, flavor, path): yield obj def _process_test_manifest(self, context, info, manifest_path): flavor, install_root, install_subdir, package_tests = info manifest_path = mozpath.normpath(manifest_path) path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path)) manifest_dir = mozpath.dirname(path) manifest_reldir = mozpath.dirname(mozpath.relpath(path, context.config.topsrcdir)) install_prefix = mozpath.join(install_root, install_subdir) try: m = manifestparser.TestManifest(manifests=[path], strict=True, rootdir=context.config.topsrcdir) defaults = m.manifest_defaults[os.path.normpath(path)] if not m.tests and not 'support-files' in defaults: raise SandboxValidationError('Empty test manifest: %s' % path, context) obj = TestManifest(context, path, m, flavor=flavor, install_prefix=install_prefix, relpath=mozpath.join(manifest_reldir, mozpath.basename(path)), dupe_manifest='dupe-manifest' in defaults) filtered = m.tests # Jetpack add-on tests are expected to be generated during the # build process so they won't exist here. if flavor != 'jetpack-addon': missing = [t['name'] for t in filtered if not os.path.exists(t['path'])] if missing: raise SandboxValidationError('Test manifest (%s) lists ' 'test that does not exist: %s' % ( path, ', '.join(missing)), context) out_dir = mozpath.join(install_prefix, manifest_reldir) if 'install-to-subdir' in defaults: # This is terrible, but what are you going to do? out_dir = mozpath.join(out_dir, defaults['install-to-subdir']) obj.manifest_obj_relpath = mozpath.join(manifest_reldir, defaults['install-to-subdir'], mozpath.basename(path)) # "head" and "tail" lists. # All manifests support support-files. # # Keep a set of already seen support file patterns, because # repeatedly processing the patterns from the default section # for every test is quite costly (see bug 922517). extras = (('head', set()), ('tail', set()), ('support-files', set())) def process_support_files(test): for thing, seen in extras: value = test.get(thing, '') if value in seen: continue seen.add(value) for pattern in value.split(): # We only support globbing on support-files because # the harness doesn't support * for head and tail. 
if '*' in pattern and thing == 'support-files': obj.pattern_installs.append( (manifest_dir, pattern, out_dir)) # "absolute" paths identify files that are to be # placed in the install_root directory (no globs) elif pattern[0] == '/': full = mozpath.normpath(mozpath.join(manifest_dir, mozpath.basename(pattern))) obj.installs[full] = (mozpath.join(install_root, pattern[1:]), False) else: full = mozpath.normpath(mozpath.join(manifest_dir, pattern)) dest_path = mozpath.join(out_dir, pattern) # If the path resolves to a different directory # tree, we take special behavior depending on the # entry type. if not full.startswith(manifest_dir): # If it's a support file, we install the file # into the current destination directory. # This implementation makes installing things # with custom prefixes impossible. If this is # needed, we can add support for that via a # special syntax later. if thing == 'support-files': dest_path = mozpath.join(out_dir, os.path.basename(pattern)) # If it's not a support file, we ignore it. # This preserves old behavior so things like # head files doesn't get installed multiple # times. else: continue obj.installs[full] = (mozpath.normpath(dest_path), False) for test in filtered: obj.tests.append(test) # Some test files are compiled and should not be copied into the # test package. They function as identifiers rather than files. if package_tests: manifest_relpath = mozpath.relpath(test['path'], mozpath.dirname(test['manifest'])) obj.installs[mozpath.normpath(test['path'])] = \ ((mozpath.join(out_dir, manifest_relpath)), True) process_support_files(test) if not filtered: # If there are no tests, look for support-files under DEFAULT. process_support_files(defaults) # We also copy manifests into the output directory, # including manifests from [include:foo] directives. for mpath in m.manifests(): mpath = mozpath.normpath(mpath) out_path = mozpath.join(out_dir, mozpath.basename(mpath)) obj.installs[mpath] = (out_path, False) # Some manifests reference files that are auto generated as # part of the build or shouldn't be installed for some # reason. Here, we prune those files from the install set. # FUTURE we should be able to detect autogenerated files from # other build metadata. Once we do that, we can get rid of this. for f in defaults.get('generated-files', '').split(): # We re-raise otherwise the stack trace isn't informative. try: del obj.installs[mozpath.join(manifest_dir, f)] except KeyError: raise SandboxValidationError('Error processing test ' 'manifest %s: entry in generated-files not present ' 'elsewhere in manifest: %s' % (path, f), context) obj.external_installs.add(mozpath.join(out_dir, f)) yield obj except (AssertionError, Exception): raise SandboxValidationError('Error processing test ' 'manifest file %s: %s' % (path, '\n'.join(traceback.format_exception(*sys.exc_info()))), context) def _process_reftest_manifest(self, context, flavor, manifest_path): manifest_path = mozpath.normpath(manifest_path) manifest_full_path = mozpath.normpath(mozpath.join( context.srcdir, manifest_path)) manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path, context.config.topsrcdir)) manifest = reftest.ReftestManifest() manifest.load(manifest_full_path) # reftest manifests don't come from manifest parser. But they are # similar enough that we can use the same emitted objects. Note # that we don't perform any installs for reftests. 
obj = TestManifest(context, manifest_full_path, manifest, flavor=flavor, install_prefix='%s/' % flavor, relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path))) for test in sorted(manifest.files): obj.tests.append({ 'path': test, 'here': mozpath.dirname(test), 'manifest': manifest_full_path, 'name': mozpath.basename(test), 'head': '', 'tail': '', 'support-files': '', 'subsuite': '', }) yield obj def _process_jar_manifests(self, context): jar_manifests = context.get('JAR_MANIFESTS', []) if len(jar_manifests) > 1: raise SandboxValidationError('While JAR_MANIFESTS is a list, ' 'it is currently limited to one value.', context) for path in jar_manifests: yield JARManifest(context, mozpath.join(context.srcdir, path)) # Temporary test to look for jar.mn files that creep in without using # the new declaration. Before, we didn't require jar.mn files to # declared anywhere (they were discovered). This will detect people # relying on the old behavior. if os.path.exists(os.path.join(context.srcdir, 'jar.mn')): if 'jar.mn' not in jar_manifests: raise SandboxValidationError('A jar.mn exists but it ' 'is not referenced in the moz.build file. ' 'Please define JAR_MANIFESTS.', context) def _emit_directory_traversal_from_context(self, context): o = DirectoryTraversal(context) o.dirs = context.get('DIRS', []) o.test_dirs = context.get('TEST_DIRS', []) o.affected_tiers = context.get_affected_tiers() # Some paths have a subconfigure, yet also have a moz.build. Those # shouldn't end up in self._external_paths. self._external_paths -= { o.relobjdir } yield o