def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
    """Emit a TestManifest object for a reftest manifest.

    Reftest manifests don't come from the manifest parser, but the data
    is close enough that the same emitted object type can be reused.
    Note that no installs are performed for reftests.
    """
    norm_path = mozpath.normpath(manifest_path)
    full_path = mozpath.normpath(mozpath.join(sandbox["SRCDIR"], norm_path))
    reldir = mozpath.dirname(mozpath.relpath(full_path, sandbox["TOPSRCDIR"]))

    manifest = reftest.ReftestManifest()
    manifest.load(full_path)

    obj = TestManifest(
        sandbox,
        full_path,
        manifest,
        flavor=flavor,
        install_prefix="%s/" % flavor,
        relpath=mozpath.join(reldir, mozpath.basename(norm_path)),
    )

    for test_path in sorted(manifest.files):
        record = {
            "path": test_path,
            "here": mozpath.dirname(test_path),
            "manifest": full_path,
            "name": mozpath.basename(test_path),
            "head": "",
            "tail": "",
            "support-files": "",
            "subsuite": "",
        }
        obj.tests.append(record)

    yield obj
def _process_files(self, obj, files, target, preprocessor = False, marker='#', target_is_file=False, optional=False):
    """Register install-manifest entries for a list of files.

    Depending on the flags, each file is installed as a preprocessed
    file, an optional-exists entry, or a symlink into ``target``.

    :param obj: the mozbuild object owning the files; may be None for
        optional entries (NOTE(review): the preprocessor branch reads
        ``obj.srcdir``/``obj.config`` without a None check — presumably
        callers never pass preprocessor=True with obj=None; confirm).
    :param files: iterable of source file paths.
    :param target: destination directory (or full file path when
        ``target_is_file`` is set).
    :param preprocessor: run the files through the preprocessor.
    :param marker: preprocessor comment marker; the special value 'jar'
        selects '%' for .css files and '#' otherwise.
    :param target_is_file: ``target`` names the destination file itself.
    :param optional: register as optional-exists rather than installing.
    """
    for f in files:
        # Compute the full destination path for this file.
        if optional:
            full_dest = f
        elif target_is_file:
            full_dest = target
        else:
            full_dest = mozpath.join(target, mozpath.basename(f))
        install_manifest, dest = self._get_manifest_from_target(full_dest)
        # No source path when there is no owning object (optional case).
        source = None if (obj is None) else mozpath.normpath(mozpath.join(obj.srcdir, f))
        if preprocessor:
            dep_file = mozpath.join(self.dep_path, target, mozpath.basename(f) +'.pp')
            # Merge the directory's existing defines with any -D flags
            # from XULPPFLAGS.
            exist_defines = self._paths_to_defines.get(obj.srcdir, {})
            xul_defines = dict(exist_defines)
            for flag in self.XULPPFLAGS:
                if flag.startswith('-D'):
                    define = flag[2:].split('=')
                    # Flags without '=value' become empty-string defines.
                    xul_defines[define[0]] = define[1] if len(define) >= 2 else ''
            defines = compute_defines(obj.config, defines = xul_defines)
            new_marker = marker
            if marker == 'jar':
                # Jar manifests use '%' markers for CSS, '#' otherwise.
                new_marker = '%' if f.endswith('.css') else '#'
            install_manifest.add_preprocess(source, dest, dep_file, marker=new_marker, defines=defines)
        elif optional:
            install_manifest.add_optional_exists(dest)
        else:
            install_manifest.add_symlink(source, dest)
def add_manifest(self, entry):
    """Add a chrome manifest entry, creating manifest files on demand.

    Raises a fatal error if the entry conflicts with an already-added
    chrome entry of the same name/type with indistinguishable flags.
    """
    # Store manifest entries in a single manifest per directory, named
    # after their parent directory, except for root manifests, all named
    # chrome.manifest.
    if entry.base:
        name = mozpath.basename(entry.base)
    else:
        name = 'chrome'
    path = mozpath.normpath(mozpath.join(entry.base, '%s.manifest' % name))
    if not self.copier.contains(path):
        # Add a reference to the manifest file in the parent manifest, if
        # the manifest file is not a root manifest.
        if entry.base:
            parent = mozpath.dirname(entry.base)
            relbase = mozpath.basename(entry.base)
            relpath = mozpath.join(relbase, mozpath.basename(path))
            # Recurse so the parent chain of manifests exists too.
            self.add_manifest(Manifest(parent, relpath))
        self.copier.add(path, ManifestFile(entry.base))
    if isinstance(entry, ManifestChrome):
        # Track chrome entries per (name, type) to detect overrides.
        data = self._chrome_db.setdefault(entry.name, {})
        entries = data.setdefault(entry.type, [])
        for e in entries:
            # Ideally, we'd actually check whether entry.flags are more
            # specific than e.flags, but in practice the following test
            # is enough for now.
            if not entry.flags or e.flags and entry.flags == e.flags:
                errors.fatal('"%s" overrides "%s"' % (entry, e))
        entries.append(entry)
    self.copier[path].add(entry)
def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
    """Yield a TestManifest built from a reftest manifest file.

    Reftest manifests are not produced by the manifest parser, but are
    similar enough that the same emitted object type works. No install
    entries are generated for reftests.
    """
    manifest_path = mozpath.normpath(manifest_path)
    manifest_full_path = mozpath.normpath(
        mozpath.join(sandbox['SRCDIR'], manifest_path))
    manifest_reldir = mozpath.dirname(
        mozpath.relpath(manifest_full_path, sandbox['TOPSRCDIR']))

    manifest = reftest.ReftestManifest()
    manifest.load(manifest_full_path)

    obj = TestManifest(
        sandbox,
        manifest_full_path,
        manifest,
        flavor=flavor,
        install_prefix='%s/' % flavor,
        relpath=mozpath.join(manifest_reldir,
                             mozpath.basename(manifest_path)))

    for entry in sorted(manifest.files):
        info = {
            'path': entry,
            'here': mozpath.dirname(entry),
            'manifest': manifest_full_path,
            'name': mozpath.basename(entry),
            'head': '',
            'tail': '',
            'support-files': '',
            'subsuite': '',
        }
        obj.tests.append(info)

    yield obj
def __init__(self, source):
    """Scan ``source`` (a path or BaseFinder) and classify its contents.

    Populates ``self.files`` with the unpacked view of the package,
    detecting whether the package is 'flat' or 'omni' (omnijar-based),
    unpacking omnijars and .xpi addons in place.
    """
    if isinstance(source, BaseFinder):
        self._finder = source
    else:
        self._finder = FileFinder(source)
    self.base = self._finder.base
    self.files = FileRegistry()
    # Assume a flat package until an omnijar is found below.
    self.kind = 'flat'
    self.omnijar = None
    self.jarlogs = {}
    self.optimizedjars = False
    self.compressed = True

    # Paths referenced via jar: urls in manifests; such jars are
    # unpacked by _handle_manifest_entry and must not be re-added as
    # plain files at the end of the loop.
    jars = set()

    for p, f in self._finder.find('*'):
        # Skip the precomplete file, which is generated at packaging time.
        if p == 'precomplete':
            continue
        base = mozpath.dirname(p)
        # If the file is a zip/jar that is not a .xpi, and contains a
        # chrome.manifest, it is an omnijar. All the files it contains
        # go in the directory containing the omnijar. Manifests are merged
        # if there is a corresponding manifest in the directory.
        if not p.endswith('.xpi') and self._maybe_zip(f) and \
                (mozpath.basename(p) == self.omnijar or not self.omnijar):
            jar = self._open_jar(p, f)
            if 'chrome.manifest' in jar:
                self.kind = 'omni'
                self.omnijar = mozpath.basename(p)
                self._fill_with_jar(base, jar)
                continue
        # If the file is a manifest, scan its entries for some referencing
        # jar: urls. If there are some, the files contained in the jar they
        # point to, go under a directory named after the jar.
        if is_manifest(p):
            # Merge into an existing manifest for this path if present.
            m = self.files[p] if self.files.contains(p) \
                else ManifestFile(base)
            for e in parse_manifest(self.base, p, f.open()):
                m.add(self._handle_manifest_entry(e, jars))
            if self.files.contains(p):
                continue
            f = m
        # If the file is a packed addon, unpack it under a directory named
        # after the xpi.
        if p.endswith('.xpi') and self._maybe_zip(f):
            self._fill_with_jar(p[:-4], self._open_jar(p, f))
            continue
        if not p in jars:
            self.files.add(p, f)
def register_idl(self, idl, allow_existing=False):
    """Registers an IDL file with this instance.

    The IDL file will be built, installed, etc.
    """
    base = mozpath.basename(idl.source_path)
    stem = mozpath.splitext(base)[0]
    xpt_name = "%s.xpt" % idl.module
    manifest_path = mozpath.join(idl.install_target, "components",
                                 "interfaces.manifest")
    chrome_manifest_path = mozpath.join(idl.install_target, "chrome.manifest")

    record = {
        "source": idl.source_path,
        "module": idl.module,
        "basename": base,
        "root": stem,
        "manifest": manifest_path,
    }

    if not allow_existing and record["basename"] in self.idls:
        raise Exception("IDL already registered: %s" % record["basename"])
    self.idls[record["basename"]] = record

    # Track this IDL's root stem under its owning module.
    module_entry = self.modules.setdefault(record["module"],
                                           (idl.install_target, set()))
    module_entry[1].add(record["root"])

    if idl.add_to_manifest:
        self.interface_manifests.setdefault(manifest_path, set()).add(xpt_name)
        self.chrome_manifests.add(chrome_manifest_path)
def register_idl(self, idl, allow_existing=False):
    """Registers an IDL file with this instance.

    The IDL file will be built, installed, etc.
    """
    basename = mozpath.basename(idl.source_path)
    entry = {
        'source': idl.source_path,
        'module': idl.module,
        'basename': basename,
        'root': mozpath.splitext(basename)[0],
        'manifest': mozpath.join(idl.install_target, 'components',
                                 'interfaces.manifest'),
    }

    if entry['basename'] in self.idls and not allow_existing:
        raise Exception('IDL already registered: %s' % entry['basename'])
    self.idls[entry['basename']] = entry

    # Record the root stem under the owning module.
    _, roots = self.modules.setdefault(entry['module'],
                                       (idl.install_target, set()))
    roots.add(entry['root'])

    if idl.add_to_manifest:
        self.interface_manifests.setdefault(entry['manifest'], set()).add(
            '%s.xpt' % idl.module)
        self.chrome_manifests.add(
            mozpath.join(idl.install_target, 'chrome.manifest'))
def repackage_installer(topsrcdir, tag, setupexe, package, output):
    """Repackage an optional zip package and setup.exe into a 7z SFX installer."""
    if package and not zipfile.is_zipfile(package):
        raise Exception("Package file %s is not a valid .zip file." % package)

    # We need the full path for the tag and output, since we chdir later.
    tag = mozpath.realpath(tag)
    output = mozpath.realpath(output)
    ensureParentDir(output)

    tmpdir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    try:
        if package:
            with zipfile.ZipFile(package) as z:
                z.extractall(tmpdir)

        # Copy setup.exe into the root of the install dir, alongside the
        # package.
        shutil.copyfile(setupexe,
                        mozpath.join(tmpdir, mozpath.basename(setupexe)))

        # archive_exe requires us to be in the directory where the package
        # is unpacked (the tmpdir).
        os.chdir(tmpdir)

        sfx_package = mozpath.join(
            topsrcdir, 'other-licenses/7zstub/firefox/7zSD.sfx')
        archive_exe('firefox' if package else None, tag, sfx_package, output)
    finally:
        os.chdir(old_cwd)
        shutil.rmtree(tmpdir)
def __init__(self, context, source, module, add_to_manifest):
    """Record an XPIDL source file, its module, and manifest registration."""
    ContextDerived.__init__(self, context)
    self.module = module
    self.add_to_manifest = add_to_manifest
    self.source_path = source
    # Cache the basename; consumers use it as the IDL's short name.
    self.basename = mozpath.basename(source)
def _get_manager_args(self):
    """Build keyword arguments for constructing the manager under test.

    Creates a throwaway working directory (cleaned up after the test)
    holding the cache, codegen, and state paths.
    """
    workdir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, workdir)

    cache_dir = mozpath.join(workdir, 'cache')
    os.mkdir(cache_dir)

    static_inputs = self._static_input_paths
    stems = {mozpath.splitext(mozpath.basename(p))[0] for p in static_inputs}
    inputs = (static_inputs, stems, set(), set())

    return {
        'config_path': self._config_path,
        'inputs': inputs,
        'exported_header_dir': mozpath.join(workdir, 'exports'),
        'codegen_dir': mozpath.join(workdir, 'codegen'),
        'state_path': mozpath.join(workdir, 'state.json'),
        'make_deps_path': mozpath.join(workdir, 'codegen.pp'),
        'make_deps_target': 'codegen.pp',
        'cache_dir': cache_dir,
    }
def _binding_info(self, p):
    """Compute binding metadata for an input path.

    Returns a tuple of:

      (stem, binding_stem, is_event, header_dir, files)

    files is itself a 4-tuple. The first two items are the binding
    header and C++ paths, respectively. The 2nd pair are the event
    header and C++ paths, or None if this isn't an event binding.
    """
    basename = mozpath.basename(p)
    stem = mozpath.splitext(basename)[0]
    binding_stem = '%sBinding' % stem
    # Exported bindings get their headers in the exported directory;
    # everything else stays in the codegen directory.
    if stem in self._exported_stems:
        header_dir = self._exported_header_dir
    else:
        header_dir = self._codegen_dir
    is_event = stem in self._generated_events_stems
    files = (
        mozpath.join(header_dir, '%s.h' % binding_stem),
        mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
        mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
        mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
    )
    return stem, binding_stem, is_event, header_dir, files
def iteritems(self):
    """Yield (variable, value) pairs for every tracked config file."""
    # The track file records filenames; each file's basename doubles as
    # the variable name.
    for tracked in self._load_config_track():
        name = mozpath.basename(tracked)
        yield name, self[name]
def consume_object(self, obj):
    """Dispatch an emitted build object to the appropriate handler.

    Objects this backend handles are acknowledged via ``obj.ack()``;
    unhandled objects (and skipped Makefile substitutions) return
    without acknowledging.
    """
    if isinstance(obj, TestManifest):
        for test in obj.tests:
            self._test_manager.add(test, flavor=obj.flavor, topsrcdir=obj.topsrcdir)
    elif isinstance(obj, XPIDLFile):
        self._idl_manager.register_idl(obj.source_path, obj.module)
    elif isinstance(obj, ConfigFileSubstitution):
        # Do not handle ConfigFileSubstitution for Makefiles. Leave that
        # to other
        if mozpath.basename(obj.output_path) == 'Makefile':
            return
        with self._get_preprocessor(obj) as pp:
            pp.do_include(obj.input_path)
        self.backend_input_files.add(obj.input_path)
    elif isinstance(obj, HeaderFileSubstitution):
        self._create_config_header(obj)
        self.backend_input_files.add(obj.input_path)
    else:
        # Unhandled object type: fall through without acknowledging.
        return
    obj.ack()
def consume_object(self, obj):
    """Dispatch an emitted build object to the appropriate handler.

    Returns True when the object was handled (or deliberately ignored),
    False otherwise.
    """
    self._configs.add(obj.config)
    if isinstance(obj, TestManifest):
        for test in obj.tests:
            self._test_manager.add(test, flavor=obj.flavor, topsrcdir=obj.topsrcdir)
    elif isinstance(obj, XPIDLFile):
        self._idl_manager.register_idl(obj)
    elif isinstance(obj, ConfigFileSubstitution):
        # Do not handle ConfigFileSubstitution for Makefiles. Leave that
        # to other
        if mozpath.basename(obj.output_path) == 'Makefile':
            return False
        with self._get_preprocessor(obj) as pp:
            pp.do_include(obj.input_path)
        self.backend_input_files.add(obj.input_path)
    # We should consider aggregating WebIDL types in emitter.py.
    elif isinstance(obj, WebIDLFile):
        self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedEventWebIDLFile):
        self._webidls.generated_events_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, TestWebIDLFile):
        self._webidls.test_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedTestWebIDLFile):
        self._webidls.preprocessed_test_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedWebIDLFile):
        self._webidls.generated_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedWebIDLFile):
        self._webidls.preprocessed_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, ExampleWebIDLInterface):
        self._webidls.example_interfaces.add(obj.name)
    elif isinstance(obj, IPDLFile):
        self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, UnifiedSources):
        if obj.have_unified_mapping:
            self._write_unified_files(obj.unified_source_mapping, obj.objdir)
        # Subclasses may post-process the unified sources.
        if hasattr(self, '_process_unified_sources'):
            self._process_unified_sources(obj)
    else:
        return False
    return True
def consume_object(self, obj):
    """Dispatch an emitted build object to the appropriate handler.

    Handled objects are acknowledged via ``obj.ack()``; unhandled ones
    (and skipped Makefile substitutions) return without acknowledging.
    """
    self._configs.add(obj.config)
    if isinstance(obj, TestManifest):
        for test in obj.tests:
            self._test_manager.add(test, flavor=obj.flavor, topsrcdir=obj.topsrcdir)
    elif isinstance(obj, XPIDLFile):
        self._idl_manager.register_idl(obj.source_path, obj.module, obj.install_target)
    elif isinstance(obj, ConfigFileSubstitution):
        # Do not handle ConfigFileSubstitution for Makefiles. Leave that
        # to other
        if mozpath.basename(obj.output_path) == 'Makefile':
            return
        with self._get_preprocessor(obj) as pp:
            pp.do_include(obj.input_path)
        self.backend_input_files.add(obj.input_path)
    elif isinstance(obj, HeaderFileSubstitution):
        self._create_config_header(obj)
        self.backend_input_files.add(obj.input_path)
    # We should consider aggregating WebIDL types in emitter.py.
    elif isinstance(obj, WebIDLFile):
        self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedEventWebIDLFile):
        self._webidls.generated_events_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, TestWebIDLFile):
        self._webidls.test_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedTestWebIDLFile):
        self._webidls.preprocessed_test_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedWebIDLFile):
        self._webidls.generated_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedWebIDLFile):
        self._webidls.preprocessed_sources.add(mozpath.join(
            obj.srcdir, obj.basename))
    elif isinstance(obj, ExampleWebIDLInterface):
        self._webidls.example_interfaces.add(obj.name)
    elif isinstance(obj, IPDLFile):
        self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
    else:
        # Unhandled object type: fall through without acknowledging.
        return
    obj.ack()
def __init__(self, context, source, module):
    """Record an XPIDL source file and the module it belongs to."""
    ContextDerived.__init__(self, context)
    self.module = module
    self.source_path = source
    # Cache the basename; consumers use it as the IDL's short name.
    self.basename = mozpath.basename(source)
    # Install alongside the rest of the final target output.
    self.install_target = context['FINAL_TARGET']
def consume_object(self, obj):
    """Consume build objects relevant to the faster make backend.

    Only objects installing into dist/bin are processed. Relies on the
    invariant that a Defines object is the first object seen for any
    given objdir (enforced by the assert below).
    """
    if not isinstance(obj, Defines) and isinstance(obj, ContextDerived):
        # Look up the defines already registered for this objdir; when
        # present, unwrap the Defines object to its raw dict.
        defines = self._defines.get(obj.objdir, {})
        if defines:
            defines = defines.defines
    if isinstance(obj, Defines):
        self._defines[obj.objdir] = obj
        # We're assuming below that Defines come first for a given objdir,
        # which is kind of set in stone from the order things are treated
        # in emitter.py.
        assert obj.objdir not in self._seen_directories
    elif isinstance(obj, JARManifest) and \
            obj.install_target.startswith('dist/bin'):
        self._consume_jar_manifest(obj, defines)
    elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \
            obj.install_target.startswith('dist/bin'):
        for path, files in obj.files.walk():
            for f in files:
                if isinstance(obj, FinalTargetPreprocessedFiles):
                    self._add_preprocess(obj, f.full_path, path,
                                         defines=defines)
                else:
                    self._install_manifests[obj.install_target].add_symlink(
                        f.full_path,
                        mozpath.join(path, mozpath.basename(f))
                    )
    elif isinstance(obj, ChromeManifestEntry) and \
            obj.install_target.startswith('dist/bin'):
        top_level = mozpath.join(obj.install_target, 'chrome.manifest')
        # Non-top-level manifests must be referenced from the top-level
        # chrome.manifest exactly once.
        if obj.path != top_level:
            entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                    obj.install_target)
            if entry not in self._manifest_entries[top_level]:
                self._manifest_entries[top_level].append(entry)
        self._manifest_entries[obj.path].append(str(obj.entry))
    elif isinstance(obj, XPIDLFile):
        self._has_xpidl = True
        # XPIDL are emitted before Defines, which breaks the assert in the
        # branch for Defines. OTOH, we don't actually care about the
        # XPIDLFile objects just yet, so we can just pretend we didn't see
        # an object in the directory yet.
        return True
    else:
        # We currently ignore a lot of object types, so just acknowledge
        # everything.
        return True
    self._seen_directories.add(obj.objdir)
    return True
def _process_test_harness_files(self, obj):
    """Register install entries for TEST_HARNESS_FILES.

    Source-directory files and patterns become symlinks in the 'tests'
    install manifest; objdir files are handled in the last loop.
    """
    for path, files in obj.srcdir_files.iteritems():
        for source in files:
            dest = '%s/%s' % (path, mozpath.basename(source))
            self._install_manifests['tests'].add_symlink(source, dest)

    for path, patterns in obj.srcdir_pattern_files.iteritems():
        for p in patterns:
            # Patterns starting with '/' are relative to the top source
            # directory; others are relative to the local srcdir.
            if p[:1] == '/':
                self._install_manifests['tests'].add_pattern_symlink(
                    obj.topsrcdir, p, path)
            else:
                self._install_manifests['tests'].add_pattern_symlink(
                    obj.srcdir, p, path)

    for path, files in obj.objdir_files.iteritems():
        for source in files:
            dest = '%s/%s' % (path, mozpath.basename(source))
            # Removed a stray debug print() that was left in this branch.
            # NOTE(review): `reltarget` and `test_manifest` do not appear
            # to be defined in this scope — confirm they come from an
            # enclosing/module scope, or this branch raises NameError.
            test_manifest.add_symlink(source, mozpath.join(reltarget, dest))
def files_from(ipdl):
    """Return the generated .cpp file names for an IPDL input path."""
    stem, ext = mozpath.splitext(mozpath.basename(ipdl))
    # Both .ipdl and .ipdlh inputs produce a <stem>.cpp file.
    outputs = ["%s.cpp" % stem]
    # .ipdl (but not .ipdlh) additionally produces Child/Parent files.
    if ext == ".ipdl":
        outputs += ["%sChild.cpp" % stem, "%sParent.cpp" % stem]
    return outputs
def add_manifest(self, entry):
    """Add a chrome manifest entry, creating its manifest file on demand.

    Entries are grouped into one manifest per directory, named after the
    parent directory; root manifests are all named chrome.manifest.
    """
    name = mozpath.basename(entry.base) if entry.base else 'chrome'
    path = mozpath.normpath(mozpath.join(entry.base, '%s.manifest' % name))

    if not self.copier.contains(path):
        # A non-root manifest must be referenced from its parent
        # manifest, so register that reference first (recursively
        # creating ancestors as needed).
        if entry.base:
            parent_dir = mozpath.dirname(entry.base)
            child_ref = mozpath.join(mozpath.basename(entry.base),
                                     mozpath.basename(path))
            self.add_manifest(Manifest(parent_dir, child_ref))
        self.copier.add(path, ManifestFile(entry.base))

    self.copier[path].add(entry)
def symlink_rule(self, source, output=None, output_group=None):
    """Emit a tup rule linking (or copying) ``source`` to its output."""
    if output:
        outputs = [output]
    else:
        outputs = [mozpath.basename(source)]
    if output_group:
        outputs.append(output_group)

    # The !tup_ln macro does a symlink or file copy (depending on the
    # platform) without shelling out to a subprocess.
    self.rule(
        cmd=['!tup_ln'],
        inputs=[source],
        outputs=outputs,
    )
def test_test_harness_files(self):
    """TEST_HARNESS_FILES should map destination dirs to source files."""
    reader = self.reader("test-harness-files")
    objs = self.read_topsrcdir(reader)

    self.assertEqual(len(objs), 1)
    self.assertIsInstance(objs[0], TestHarnessFiles)

    expected = {
        "mochitest": ["runtests.py", "utils.py"],
        "testing/mochitest": ["mochitest.py", "mochitest.ini"],
    }
    for dest, sources in objs[0].srcdir_files.iteritems():
        self.assertTrue(dest in expected)
        self.assertEqual(sorted(expected[dest]),
                         sorted(mozpath.basename(s) for s in sources))
def add(self, path, file):
    '''Queue the given BaseFile instance for packaging at the given path.'''
    assert not self._closed
    # An install.rdf marks its containing directory as an addon.
    if mozpath.basename(path) == 'install.rdf':
        self._addons.add(mozpath.dirname(path))
    if is_manifest(path):
        self._add_manifest_file(path, file)
    elif path.endswith('.xpt'):
        self._queue.append(self.formatter.add_interfaces, path, file)
    else:
        self._file_queue.append(self.formatter.add, path, file)
def _add_preprocess(self, obj, path, dest, **kwargs):
    """Register a preprocessed install of ``path`` into ``dest``."""
    name = mozpath.basename(path)
    # This matches what PP_TARGETS do in config/rules: strip a
    # trailing .in from the output name.
    if name.endswith('.in'):
        name = name[:-len('.in')]
    # Dependency files live in a flat faster/.deps directory; '/' is
    # folded into '_' to keep the flattened names unique.
    flattened = mozpath.join(obj.install_target, dest, name).replace('/', '_')
    depfile = mozpath.join(
        self.environment.topobjdir, 'faster', '.deps', flattened)
    self._install_manifests[obj.install_target].add_preprocess(
        mozpath.join(obj.srcdir, path),
        mozpath.join(dest, name),
        depfile,
        **kwargs)
def test_test_manifest_inactive_ignored(self):
    """Inactive tests should not be installed."""
    reader = self.reader('test-manifest-inactive-ignored')
    manifests = [obj for obj in self.read_topsrcdir(reader)
                 if isinstance(obj, TestManifest)]

    self.assertEqual(len(manifests), 1)
    manifest = manifests[0]
    self.assertEqual(manifest.flavor, 'mochitest')

    # Only the manifest itself and the active test should be installed.
    installed = {mozpath.basename(k) for k in manifest.installs.keys()}
    self.assertEqual(installed, {'mochitest.ini', 'test_active.html'})
def find_deps_files(path):
    """Find paths to Make dependency files.

    This is an iterator of (objdir, deps_path), where objdir is the
    directory containing the .deps directory and deps_path is the path
    to a .pp file inside it.
    """
    for root, _dirs, files in os.walk(path):
        root = mozpath.normpath(root)
        if mozpath.basename(root) != '.deps':
            continue
        # Use mozpath.dirname (not os.path.dirname) for consistency with
        # the rest of the path handling here; root is already normalized
        # to '/' separators.
        parent = mozpath.dirname(root)
        for f in files:
            if f.endswith('.pp'):
                yield parent, mozpath.join(root, f)
def unify_file(self, path, file1, file2):
    '''
    Unify files taking Mozilla application special cases into account.
    Otherwise defer to UnifiedFinder.unify_file.
    '''
    basename = mozpath.basename(path)
    if basename == 'buildconfig.html':
        content1 = file1.open().readlines()
        content2 = file2.open().readlines()
        # Copy everything from the first file up to the end of its <body>,
        # insert a <hr> between the two files and copy the second file's
        # content beginning after its leading <h1>.
        return GeneratedFile(''.join(
            content1[:content1.index('</body>\n')] +
            ['<hr> </hr>\n'] +
            content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:]
        ))
    elif basename == 'install.rdf':
        # install.rdf files often have em:targetPlatform (either as
        # attribute or as tag) that will differ between platforms. The
        # unified install.rdf should contain both em:targetPlatforms if
        # they exist, or strip them if only one file has a target platform.
        content1, content2 = (
            # Rewrite an em:targetPlatform attribute into an equivalent
            # child tag so both forms can be merged uniformly below.
            FIND_TARGET_PLATFORM_ATTR.sub(lambda m: \
                m.group('tag') + m.group('attrs') + m.group('otherattrs') +
                '<%stargetPlatform>%s</%stargetPlatform>' % \
                (m.group('ns') or "", m.group('platform'), m.group('ns') or ""),
                f.open().read()
            ) for f in (file1, file2)
        )
        platform2 = FIND_TARGET_PLATFORM.search(content2)
        # Append the second file's targetPlatform right after the first
        # file's (or strip nothing if the second file has none).
        return GeneratedFile(FIND_TARGET_PLATFORM.sub(
            lambda m: m.group(0) + platform2.group(0) if platform2 else '',
            content1
        ))
    elif path.endswith('.xpi'):
        # Unify the contents of the two xpi archives; if their contents
        # unify without errors, keep the first archive as-is.
        finder1 = JarFinder(os.path.join(self._finder1.base, path),
                            JarReader(fileobj=file1.open()))
        finder2 = JarFinder(os.path.join(self._finder2.base, path),
                            JarReader(fileobj=file2.open()))
        unifier = UnifiedFinder(finder1, finder2, sorted=self._sorted)
        err = errors.count
        # Force the lazy find() to run for its side effects (errors).
        all(unifier.find(''))
        if err == errors.count:
            return file1
        return None
    return UnifiedFinder.unify_file(self, path, file1, file2)
def _preprocess(self, backend_file, input_file, destdir=None):
    """Emit a rule that runs the preprocessor over ``input_file``."""
    cmd = self._py_action('preprocessor')
    cmd += backend_file.defines
    cmd += ['$(ACDEFINES)', '%f', '-o', '%o']

    # The output name drops a trailing .in, per preprocessor convention.
    name = mozpath.basename(input_file)
    if name.endswith('.in'):
        name = mozpath.splitext(name)[0]
    if destdir:
        output = mozpath.join(destdir, name)
    else:
        output = name

    backend_file.rule(
        inputs=[input_file],
        display='Preprocess %o',
        cmd=cmd,
        outputs=[output],
    )
def _walk_hierarchy(self, obj, element, namespace=''):
    """Walk a ``HierarchicalStringList`` and yield install triples.

    For each string in ``element`` (walked as by ``element.walk()``) in
    the context of the mozbuild object ``obj``, yields a three-tuple:

    - source: normalized absolute path of the named source file
    - dest: relative destination path, including the namespace
    - flags: flags dictionary for the string, or None if absent
    """
    for subdir, entries in element.walk():
        for entry in entries:
            src = mozpath.normpath(mozpath.join(obj.srcdir, entry))
            dst = mozpath.join(namespace, subdir, mozpath.basename(entry))
            yield src, dst, entries.flags_for(entry)
def test_test_harness_files(self):
    """TEST_HARNESS_FILES should produce a single TestHarnessFiles object."""
    reader = self.reader('test-harness-files')
    objs = self.read_topsrcdir(reader)

    self.assertEqual(len(objs), 1)
    self.assertIsInstance(objs[0], TestHarnessFiles)

    expected = {
        'mochitest': ['runtests.py', 'utils.py'],
        'testing/mochitest': ['mochitest.py', 'mochitest.ini'],
    }
    for dest, sources in objs[0].files.walk():
        self.assertTrue(dest in expected)
        self.assertEqual(sorted(mozpath.basename(s) for s in sources),
                         sorted(expected[dest]))
def _handle_idl_manager(self, manager):
    """Emit tup rules that build XPIDL modules and the xptdata codegen.

    One rule is emitted per XPIDL module (producing the .xpt plus
    generated headers and Rust bindings), and a final rule aggregates
    all .xpt files into xptdata.cpp.
    """
    # XPIDL isn't relevant to artifact builds.
    if self.environment.is_artifact_build:
        return

    backend_file = self._get_backend_file('xpcom/xpidl')
    backend_file.export_shell()

    # Collect the include directories across all modules so every module
    # build can resolve cross-module includes.
    all_idl_directories = set()
    all_idl_directories.update(
        *map(lambda x: x[1], manager.modules.itervalues()))

    all_xpts = []
    for module, (idls, _) in sorted(manager.modules.iteritems()):
        cmd = [
            '$(PYTHON_PATH)',
            '$(PLY_INCLUDE)',
            '-I$(IDL_PARSER_DIR)',
            '-I$(IDL_PARSER_CACHE_DIR)',
            '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py',
            '--cache-dir', '$(IDL_PARSER_CACHE_DIR)',
            '--bindings-conf', '$(topsrcdir)/dom/bindings/Bindings.conf',
        ]
        for d in all_idl_directories:
            cmd.extend(['-I', d])
        cmd.extend([
            '$(DIST)/include',
            '$(DIST)/xpcrs',
            '.',
            module,
        ])
        cmd.extend(sorted(idls))

        all_xpts.append('$(MOZ_OBJ_ROOT)/%s/%s.xpt' % (backend_file.relobjdir, module))
        # Each IDL stem yields a C++ header and runtime/build-time Rust
        # binding files in addition to the module .xpt.
        outputs = ['%s.xpt' % module]
        stems = sorted(
            mozpath.splitext(mozpath.basename(idl))[0] for idl in idls)
        outputs.extend(
            ['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in stems])
        outputs.extend(
            ['$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in stems])
        outputs.extend(
            ['$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in stems])
        backend_file.rule(
            inputs=[
                '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py',
                '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py',
                self._installed_idls,
            ],
            display='XPIDL %s' % module,
            cmd=cmd,
            outputs=outputs,
            extra_outputs=[self._installed_files],
            check_unchanged=True,
        )

    # Aggregate every module's .xpt into the xptdata.cpp source.
    cpp_backend_file = self._get_backend_file('xpcom/reflect/xptinfo')
    cpp_backend_file.export_shell()
    cpp_backend_file.rule(
        inputs=all_xpts,
        display='XPIDL xptcodegen.py %o',
        cmd=[
            '$(PYTHON_PATH)',
            '$(PLY_INCLUDE)',
            '$(topsrcdir)/xpcom/reflect/xptinfo/xptcodegen.py',
            '%o',
            '%f',
        ],
        outputs=['xptdata.cpp'],
        check_unchanged=True,
    )
def consume_object(self, obj):
    """Dispatch an emitted build object to the appropriate handler.

    Returns True when the object was handled (or deliberately ignored),
    False when other backends should also process it.
    """
    self._configs.add(obj.config)
    if isinstance(obj, XPIDLModule):
        # TODO bug 1240134 tracks not processing XPIDL files during
        # artifact builds.
        self._idl_manager.link_module(obj)
    elif isinstance(obj, ConfigFileSubstitution):
        # Do not handle ConfigFileSubstitution for Makefiles. Leave that
        # to other
        if mozpath.basename(obj.output_path) == 'Makefile':
            return False
        with self._get_preprocessor(obj) as pp:
            pp.do_include(obj.input_path)
        self.backend_input_files.add(obj.input_path)
    elif isinstance(obj, WebIDLCollection):
        self._handle_webidl_collection(obj)
    elif isinstance(obj, IPDLCollection):
        self._handle_generated_sources(
            mozpath.join(obj.objdir, f) for f in obj.all_generated_sources())
        self._write_unified_files(obj.unified_source_mapping, obj.objdir,
                                  poison_windows_h=False)
        self._handle_ipdl_sources(
            obj.objdir,
            list(sorted(obj.all_sources())),
            list(sorted(obj.all_preprocessed_sources())),
            list(sorted(obj.all_regular_sources())),
            obj.unified_source_mapping)
    elif isinstance(obj, XPCOMComponentManifests):
        self._handle_xpcom_collection(obj)
    elif isinstance(obj, UnifiedSources):
        # Unified sources aren't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        if obj.have_unified_mapping:
            self._write_unified_files(obj.unified_source_mapping, obj.objdir)
        # Subclasses may post-process the unified sources.
        if hasattr(self, '_process_unified_sources'):
            self._process_unified_sources(obj)
    elif isinstance(obj, BaseProgram):
        self._binaries.programs.append(obj)
        return False
    elif isinstance(obj, SharedLibrary):
        self._binaries.shared_libraries.append(obj)
        return False
    elif isinstance(obj, (GeneratedSources, HostGeneratedSources)):
        self._handle_generated_sources(obj.files)
        return False
    elif isinstance(obj, GeneratedFile):
        # Only track generated files needed by the compile phase.
        if obj.required_during_compile or obj.required_before_compile:
            for f in itertools.chain(obj.required_before_compile,
                                     obj.required_during_compile):
                fullpath = ObjDirPath(obj._context, '!' + f).full_path
                self._handle_generated_sources([fullpath])
        return False
    elif isinstance(obj, Exports):
        # Only objdir-relative exported files are generated sources.
        objdir_files = [
            f.full_path
            for path, files in obj.files.walk()
            for f in files
            if isinstance(f, ObjDirPath)
        ]
        if objdir_files:
            self._handle_generated_sources(objdir_files)
        return False
    elif isinstance(obj, GnProjectData):
        # These are only handled by special purpose build backends,
        # ignore them here.
        return True
    else:
        return False
    return True
def _get_crate_name(crate_path):
    """Return the crate name from its Cargo.toml.

    Falls back to the directory basename when Cargo.toml is missing,
    unreadable, or lacks a package name.
    """
    try:
        with open(mozpath.join(crate_path, "Cargo.toml")) as f:
            return pytoml.load(f)["package"]["name"]
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit. IOError, parse errors, and
        # missing keys all fall back to the directory name.
        return mozpath.basename(crate_path)
def write(self, line):
    """Parse one line of a jar manifest.

    A Preprocessor instance feeds the parser through calls to this
    method. Lines are classified as jar-section headers,
    relativesrcdir directives, chrome manifest entries, or file
    entries; a line matching none of these ends the current section and
    is re-parsed as a potential new section header.
    """
    # Ignore comments and empty lines
    if self.ignore.match(line):
        return
    # A jar manifest file can declare several different sections, each of
    # which applies to a given "jar file". Each of those sections starts
    # with "<name>.jar:".
    if self._current_jar is None:
        m = self.jarline.match(line)
        if not m:
            raise RuntimeError(line)
        if m.group('jarfile'):
            self._current_jar = JarInfo(m.group('jarfile'))
            self._jars.append(self._current_jar)
        return
    # Within each section, there can be three different types of entries:
    # - indications of the relative source directory we pretend to be in
    #   when considering localization files, in the following form;
    #   "relativesrcdir <path>:"
    m = self.relsrcline.match(line)
    if m:
        # A new relativesrcdir after content has accumulated starts a
        # fresh sub-section for the same jar.
        if self._current_jar.chrome_manifests or self._current_jar.entries:
            self._current_jar = JarInfo(self._current_jar.name)
            self._jars.append(self._current_jar)
        self._current_jar.relativesrcdir = m.group('relativesrcdir')
        return
    # - chrome manifest entries, prefixed with "%".
    m = self.regline.match(line)
    if m:
        rline = m.group(1)
        if rline not in self._current_jar.chrome_manifests:
            self._current_jar.chrome_manifests.append(rline)
        return
    # - entries indicating files to be part of the given jar. They are
    #   formed thusly:
    #   "<dest_path>"
    #   or
    #   "<dest_path> (<source_path>)"
    #   The <dest_path> is where the file(s) will be put in the chrome jar.
    #   The <source_path> is where the file(s) can be found in the source
    #   directory. The <source_path> may start with a "%" for files part
    #   of a localization directory, in which case the "%" counts as the
    #   locale.
    #   Each entry can be prefixed with "*" for preprocessing and "+" to
    #   always overwrite the destination independently of file timestamps
    #   (the usefulness of the latter is dubious in the modern days).
    m = self.entryline.match(line)
    if m:
        self._current_jar.entries.append(
            JarManifestEntry(
                m.group('output'),
                # When no explicit source is given, the output's
                # basename names the source file.
                m.group('source') or mozpath.basename(m.group('output')),
                is_locale=bool(m.group('locale')),
                preprocess=bool(m.group('optPreprocess')),
                overwrite=bool(m.group('optOverwrite')),
            ))
        return
    # Not recognized within the current section: close it and re-parse
    # the line as a potential new section header.
    self._current_jar = None
    self.write(line)
def consume_object(self, obj):
    """Dispatch a single frontend object to the matching collector.

    Returns True when the object was fully consumed here, and False when
    derived backends should also get a chance to process it.
    """
    self._configs.add(obj.config)
    if isinstance(obj, TestManifest):
        for test in obj.tests:
            self._test_manager.add(test, obj.flavor, obj.topsrcdir, obj.default_support_files)
        self._test_manager.add_installs(obj, obj.topsrcdir)
    elif isinstance(obj, XPIDLFile):
        # TODO bug 1240134 tracks not processing XPIDL files during
        # artifact builds.
        self._idl_manager.register_idl(obj)
    elif isinstance(obj, ConfigFileSubstitution):
        # Do not handle ConfigFileSubstitution for Makefiles. Leave that
        # to other backends.
        if mozpath.basename(obj.output_path) == 'Makefile':
            return False
        with self._get_preprocessor(obj) as pp:
            pp.do_include(obj.input_path)
        self.backend_input_files.add(obj.input_path)
    # We should consider aggregating WebIDL types in emitter.py.
    elif isinstance(obj, WebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedEventWebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.generated_events_sources.add(
            mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, TestWebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.test_sources.add(
            mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedTestWebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.preprocessed_test_sources.add(
            mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, GeneratedWebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.generated_sources.add(
            mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, PreprocessedWebIDLFile):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.preprocessed_sources.add(
            mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, ExampleWebIDLInterface):
        # WebIDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._webidls.example_interfaces.add(obj.name)
    elif isinstance(obj, IPDLFile):
        # IPDL isn't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
    elif isinstance(obj, UnifiedSources):
        # Unified sources aren't relevant to artifact builds.
        if self.environment.is_artifact_build:
            return True
        if obj.have_unified_mapping:
            self._write_unified_files(obj.unified_source_mapping, obj.objdir)
        if hasattr(self, '_process_unified_sources'):
            self._process_unified_sources(obj)
    elif isinstance(obj, BaseProgram):
        self._binaries.programs.append(obj)
        return False
    elif isinstance(obj, SharedLibrary):
        self._binaries.shared_libraries.append(obj)
        return False
    else:
        return False
    return True
def test_basename(self): self.assertEqual(basename('foo/bar/baz'), 'baz') self.assertEqual(basename('foo/bar'), 'bar') self.assertEqual(basename('foo'), 'foo') self.assertEqual(basename('foo/bar/'), '')
def write(self, line):
    """Feed a single (already preprocessed) jar manifest line to the parser.

    A Preprocessor instance feeds the parser through calls to this method.
    """
    # Ignore comments and empty lines
    if self.ignore.match(line):
        return
    # A jar manifest file can declare several different sections, each of
    # which applies to a given "jar file". Each of those sections starts
    # with "<name>.jar:", in which case the path is assumed relative to
    # a "chrome" directory, or "[<base/path>] <subpath/name>.jar:", where
    # a base directory is given (usually pointing at the root of the
    # application or addon) and the jar path is given relative to the base
    # directory.
    if self._current_jar is None:
        m = self.jarline.match(line)
        if not m:
            raise RuntimeError(line)
        if m.group('jarfile'):
            self._current_jar = JarInfo(m.group('base'), m.group('jarfile'))
            self._jars.append(self._current_jar)
        return
    # Within each section, there can be three different types of entries:
    # - indications of the relative source directory we pretend to be in
    #   when considering localization files, in the following form;
    #   "relativesrcdir <path>:"
    m = self.relsrcline.match(line)
    if m:
        # A new relativesrcdir after entries/manifests were recorded opens
        # a fresh section copied from the current jar.
        if self._current_jar.chrome_manifests or self._current_jar.entries:
            self._current_jar = JarInfo(self._current_jar)
            self._jars.append(self._current_jar)
        self._current_jar.relativesrcdir = m.group('relativesrcdir')
        return
    # - chrome manifest entries, prefixed with "%".
    m = self.regline.match(line)
    if m:
        # Normalize internal whitespace so duplicate detection is reliable.
        rline = ' '.join(m.group(1).split())
        if rline not in self._current_jar.chrome_manifests:
            self._current_jar.chrome_manifests.append(rline)
        return
    # - entries indicating files to be part of the given jar. They are
    #   formed thusly:
    #   "<dest_path>"
    #   or
    #   "<dest_path> (<source_path>)"
    #   The <dest_path> is where the file(s) will be put in the chrome jar.
    #   The <source_path> is where the file(s) can be found in the source
    #   directory. The <source_path> may start with a "%" for files part
    #   of a localization directory, in which case the "%" counts as the
    #   locale.
    #   Each entry can be prefixed with "*" for preprocessing.
    m = self.entryline.match(line)
    if m:
        # The legacy "+" (force overwrite) prefix is rejected outright.
        if m.group('optOverwrite'):
            raise DeprecatedJarManifest(
                'The "+" prefix is not supported anymore')
        self._current_jar.entries.append(
            JarManifestEntry(
                m.group('output'),
                # Default the source to the output's basename when no
                # explicit source path was given.
                m.group('source') or mozpath.basename(m.group('output')),
                is_locale=bool(m.group('locale')),
                preprocess=bool(m.group('optPreprocess')),
            ))
        return
    # Anything else ends the current section; reparse the line as a
    # potential new jar section header.
    self._current_jar = None
    self.write(line)
def _format_statements_for_generated_file(self, obj, tier,
                                          extra_dependencies=''):
    """Return the list of statements to write to the Makefile for this
    GeneratedFile.

    This function will invoke _format_generated_file_input_name and
    _format_generated_file_output_name to munge the input/output filenames
    before sending them to the output.
    """
    assert isinstance(obj, GeneratedFile)

    # Localized generated files can use {AB_CD} and {AB_rCD} in their
    # output paths.
    if obj.localized:
        substs = {'AB_CD': '$(AB_CD)', 'AB_rCD': '$(AB_rCD)'}
    else:
        substs = {}
    outputs = []
    needs_AB_rCD = False
    for o in obj.outputs:
        needs_AB_rCD = needs_AB_rCD or ('AB_rCD' in o)
        try:
            outputs.append(
                self._format_generated_file_output_name(
                    o.format(**substs), obj))
        except KeyError as e:
            # A substitution key other than AB_CD/AB_rCD was used.
            raise ValueError(
                '%s not in %s is not a valid substitution in %s'
                % (e.args[0], ', '.join(sorted(substs.keys())), o))

    # All outputs hang off a single stub/dep file derived from the first
    # output's name.
    first_output = outputs[0]
    dep_file = mozpath.join(mozpath.dirname(first_output), "$(MDDEPDIR)",
                            "%s.pp" % mozpath.basename(first_output))
    # The stub target file needs to go in MDDEPDIR so that it doesn't
    # get written into generated Android resource directories, breaking
    # Gradle tooling and/or polluting the Android packages.
    stub_file = mozpath.join(mozpath.dirname(first_output), "$(MDDEPDIR)",
                             "%s.stub" % mozpath.basename(first_output))

    if obj.inputs:
        inputs = [
            self._format_generated_file_input_name(f, obj)
            for f in obj.inputs
        ]
    else:
        inputs = []

    force = ''
    if obj.force:
        force = ' FORCE'
    elif obj.localized:
        force = ' $(if $(IS_LANGUAGE_REPACK),FORCE)'

    ret = []

    if obj.script:
        # If we are doing an artifact build, we don't run compiler, so
        # we can skip generated files that are needed during compile,
        # or let the rule run as the result of something depending on
        # it.
        if not (obj.required_before_compile or obj.required_during_compile) or \
                not self.environment.is_artifact_build:
            if tier and not needs_AB_rCD:
                # Android localized resources have special Makefile
                # handling.
                ret.append(
                    '%s%s: %s' % (tier,
                                  ':' if tier != 'default' else '',
                                  stub_file))
        for output in outputs:
            # Each real output depends only on the stub; the stub's rule
            # does the actual generation.
            ret.append('%s: %s ;' % (output, stub_file))
            ret.append('GARBAGE += %s' % output)
        ret.append('GARBAGE += %s' % stub_file)
        ret.append('EXTRA_MDDEPEND_FILES += %s' % dep_file)

        ret.append((
            """{stub}: {script}{inputs}{backend}{force}
\t$(REPORT_BUILD)
\t$(call py_action,file_generate,{locale}{script} """  # wrap for E501
            """{method} {output} {dep_file} {stub}{inputs}{flags})
\t@$(TOUCH) $@

"""
        ).format(
            stub=stub_file,
            output=first_output,
            dep_file=dep_file,
            inputs=' ' + ' '.join(inputs) if inputs else '',
            flags=' ' + ' '.join(shell_quote(f) for f in obj.flags)
                  if obj.flags else '',
            backend=' ' + extra_dependencies if extra_dependencies else '',
            # Locale repacks repack multiple locales from a single configured objdir,
            # so standard mtime dependencies won't work properly when the build is re-run
            # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
            # in this situation, so simply force the generation to run in that case.
            force=force,
            locale='--locale=$(AB_CD) ' if obj.localized else '',
            script=obj.script,
            method=obj.method))

    return ret
def convert_support_files(self, test, install_root, manifest_dir, out_dir):
    """Compute the install actions needed for one test's support files.

    Arguments:
      test - The test object to process.
      install_root - The directory under $objdir/_tests that will contain
          the tests for this harness (examples are "testing/mochitest",
          "xpcshell").
      manifest_dir - Absolute path to the (srcdir) directory containing the
          manifest that included this test.
      out_dir - The path relative to $objdir/_tests used as the destination
          for the test, based on the relative path to the manifest in the
          srcdir, the install_root, and 'install-to-subdir', if present in
          the manifest.

    Returns a TestInstallInfo describing installs, pattern installs,
    external installs and deferred installs.
    """
    info = TestInstallInfo()
    for field, seen in self._fields:
        value = test.get(field, '')
        for pattern in value.split():
            # We track uniqueness locally (per test) where duplicates are forbidden,
            # and globally, where they are permitted. If a support file appears multiple
            # times for a single test, there are unnecessary entries in the manifest. But
            # many entries will be shared across tests that share defaults.
            # We need to memoize on the basis of both the path and the output
            # directory for the benefit of tests specifying 'install-to-subdir'.
            key = field, pattern, out_dir
            if key in info.seen:
                raise ValueError(
                    "%s appears multiple times in a test manifest under a %s field,"
                    " please omit the duplicate entry." % (pattern, field))
            info.seen.add(key)
            if key in seen:
                continue
            seen.add(key)

            if field == 'generated-files':
                info.external_installs.add(
                    mozpath.normpath(mozpath.join(out_dir, pattern)))
            # '!' indicates our syntax for inter-directory support file
            # dependencies. These receive special handling in the backend.
            elif pattern[0] == '!':
                info.deferred_installs.add(pattern)
            # We only support globbing on support-files because
            # the harness doesn't support * for head.
            elif '*' in pattern and field == 'support-files':
                info.pattern_installs.append(
                    (manifest_dir, pattern, out_dir))
            # "absolute" paths identify files that are to be
            # placed in the install_root directory (no globs)
            elif pattern[0] == '/':
                full = mozpath.normpath(
                    mozpath.join(manifest_dir, mozpath.basename(pattern)))
                info.installs.append(
                    (full, mozpath.join(install_root, pattern[1:])))
            else:
                full = mozpath.normpath(mozpath.join(
                    manifest_dir, pattern))
                dest_path = mozpath.join(out_dir, pattern)

                # If the path resolves to a different directory
                # tree, we take special behavior depending on the
                # entry type.
                if not full.startswith(manifest_dir):
                    # If it's a support file, we install the file
                    # into the current destination directory.
                    # This implementation makes installing things
                    # with custom prefixes impossible. If this is
                    # needed, we can add support for that via a
                    # special syntax later.
                    if field == 'support-files':
                        dest_path = mozpath.join(out_dir,
                                                 os.path.basename(pattern))
                    # If it's not a support file, we ignore it.
                    # This preserves old behavior so things like
                    # head files don't get installed multiple
                    # times.
                    else:
                        continue
                info.installs.append((full, mozpath.normpath(dest_path)))
    return info
def all_basenames(self): return [mozpath.basename(source) for source in self.all_sources()]
def generated_events_basenames(self): return [mozpath.basename(s) for s in self.generated_events_sources]
def __init__(self, sandbox, source, module): SandboxDerived.__init__(self, sandbox) self.source_path = source self.basename = mozpath.basename(source) self.module = module
def test_test_manifest_keys_extracted(self): """Ensure all metadata from test manifests is extracted.""" reader = self.reader('test-manifest-keys-extracted') objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)] self.assertEqual(len(objs), 6) metadata = { 'a11y.ini': { 'flavor': 'a11y', 'installs': { 'a11y.ini': False, 'test_a11y.js': True, }, 'pattern-installs': 1, }, 'browser.ini': { 'flavor': 'browser-chrome', 'installs': { 'browser.ini': False, 'test_browser.js': True, 'support1': False, 'support2': False, }, }, 'metro.ini': { 'flavor': 'metro-chrome', 'installs': { 'metro.ini': False, 'test_metro.js': True, }, }, 'mochitest.ini': { 'flavor': 'mochitest', 'installs': { 'mochitest.ini': False, 'test_mochitest.js': True, }, 'external': { 'external1', 'external2', }, }, 'chrome.ini': { 'flavor': 'chrome', 'installs': { 'chrome.ini': False, 'test_chrome.js': True, }, }, 'xpcshell.ini': { 'flavor': 'xpcshell', 'dupe': True, 'installs': { 'xpcshell.ini': False, 'test_xpcshell.js': True, 'head1': False, 'head2': False, 'tail1': False, 'tail2': False, }, }, } for o in objs: m = metadata[mozpath.basename(o.manifest_relpath)] self.assertTrue(o.path.startswith(o.directory)) self.assertEqual(o.flavor, m['flavor']) self.assertEqual(o.dupe_manifest, m.get('dupe', False)) external_normalized = set(mozpath.basename(p) for p in o.external_installs) self.assertEqual(external_normalized, m.get('external', set())) self.assertEqual(len(o.installs), len(m['installs'])) for path in o.installs.keys(): self.assertTrue(path.startswith(o.directory)) relpath = path[len(o.directory)+1:] self.assertIn(relpath, m['installs']) self.assertEqual(o.installs[path][1], m['installs'][relpath]) if 'pattern-installs' in m: self.assertEqual(len(o.pattern_installs), m['pattern-installs'])
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    """Translate a gyp generator result into GypContext objects.

    Yields one GypContext per gyp target, populated with sources, defines,
    include paths and compiler flags derived from the gyp data.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Collect static/shared library dependency names into use_libs,
            # recursing into static_library and 'none' targets because gyp
            # does not propagate their transitive dependencies for us.
            for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    l = targets[t]['target_name']
                    if l not in use_libs:
                        use_libs.append(l)
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])

        libs.extend(spec.get('libraries', []))
        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                # Raw linker flags go to OS_LIBS verbatim.
                if l not in os_libs:
                    os_libs.append(l)
            elif l.endswith('.lib'):
                # Windows import libraries, stripped of their extension.
                l = l[:-4]
                if l not in os_libs:
                    os_libs.append(l)
            elif l:
                # For library names passed in from moz.build.
                l = os.path.basename(l)
                if l not in use_libs:
                    use_libs.append(l)

        if spec['type'] == 'none':
            # 'none' targets are only interesting for their actions/copies.
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library',
                              'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects an unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and \
                    spec.get('variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and \
                        s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if config.substs['CC_TYPE'] == 'clang-cl' and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + \
                        include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(('!', '%')):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(('!', '%')) and \
                            not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            if config.substs['OS_TARGET'] == 'SunOS':
                context['LDFLAGS'] = target_conf.get('ldflags', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True
        context['COMPILE_FLAGS']['OS_INCLUDES'] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sanbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
def source_files(self): for srcs in self.sources.values(): for f in srcs: if mozpath.basename(mozpath.splitext(f)[0]) == mozpath.splitext(self.program)[0]: return [f] return []
def generate_build_files(self):
    """Generate files required for the build.

    This function is in charge of generating all the .h/.cpp files derived
    from input .webidl files. Please note that there are build actions
    required to produce .webidl files and these build actions are
    explicitly not captured here: this function assumes all .webidl files
    are present and up to date.

    This routine is called as part of the build to ensure files that need
    to exist are present and up to date. This routine may not be called if
    the build dependencies (generated as a result of calling this the first
    time) say everything is up to date.

    Because reprocessing outputs for every .webidl on every invocation
    is expensive, we only regenerate the minimal set of files on every
    invocation. The rules for deciding what needs done are roughly as
    follows:

    1. If any .webidl changes, reparse all .webidl files and regenerate
       the global derived files. Only regenerate output files (.h/.cpp)
       impacted by the modified .webidl files.

    2. If an non-.webidl dependency (Python files, config file) changes,
       assume everything is out of date and regenerate the world. This
       is because changes in those could globally impact every output
       file.

    3. If an output file is missing, ensure it is present by performing
       necessary regeneration.
    """
    # Despite #1 above, we assume the build system is smart enough to not
    # invoke us if nothing has changed. Therefore, any invocation means
    # something has changed. And, if anything has changed, we need to
    # parse the WebIDL.
    self._parse_webidl()

    result = BuildResult()

    # If we parse, we always update globals - they are cheap and it is
    # easier that way.
    created, updated, unchanged = self._write_global_derived()
    result.created |= created
    result.updated |= updated
    result.unchanged |= unchanged

    # If any of the extra dependencies changed, regenerate the world.
    global_changed, global_hashes = self._global_dependencies_changed()
    if global_changed:
        # Make a copy because we may modify.
        changed_inputs = set(self._input_paths)
    else:
        changed_inputs = self._compute_changed_inputs()

    self._state['global_depends'] = global_hashes

    # Generate bindings from .webidl files.
    for filename in sorted(changed_inputs):
        basename = mozpath.basename(filename)
        result.inputs.add(filename)
        written, deps = self._generate_build_files_for_webidl(filename)
        result.created |= written[0]
        result.updated |= written[1]
        result.unchanged |= written[2]

        # Record per-webidl state so the next run can compute what changed.
        self._state['webidls'][basename] = dict(
            filename=filename,
            outputs=written[0] | written[1] | written[2],
            inputs=set(deps),
            sha1=self._input_hashes[filename],
        )

    # Process some special interfaces required for testing.
    for interface in self._example_interfaces:
        written = self.generate_example_files(interface)
        result.created |= written[0]
        result.updated |= written[1]
        result.unchanged |= written[2]

    # Generate a make dependency file.
    if self._make_deps_path:
        mk = Makefile()
        codegen_rule = mk.create_rule([self._make_deps_target])
        codegen_rule.add_dependencies(global_hashes.keys())
        codegen_rule.add_dependencies(self._input_paths)

        with FileAvoidWrite(self._make_deps_path) as fh:
            mk.dump(fh)

    self._save_state()

    return result
def copy(self, dest, skip_if_older=True):
    '''
    Pack all registered files in the given destination jar. The given
    destination jar may be a path to jar file, or a Dest instance for
    a jar file.
    If the destination jar file exists, its (compressed) contents are used
    instead of the registered BaseFile instances when appropriate.
    '''
    class DeflaterDest(Dest):
        '''
        Dest-like class, reading from a file-like object initially, but
        switching to a Deflater object if written to.

            dest = DeflaterDest(original_file)
            dest.read()      # Reads original_file
            dest.write(data) # Creates a Deflater and write data there
            dest.read()      # Re-opens the Deflater and reads from it
        '''
        def __init__(self, orig=None, compress=True):
            self.mode = None
            self.deflater = orig
            self.compress = compress

        def read(self, length=-1):
            if self.mode != 'r':
                # Only allowed to switch to read mode from the pristine
                # state; once written to, reads go through the Deflater.
                assert self.mode is None
                self.mode = 'r'
            return self.deflater.read(length)

        def write(self, data):
            if self.mode != 'w':
                from mozpack.mozjar import Deflater
                self.deflater = Deflater(self.compress)
                self.mode = 'w'
            self.deflater.write(data)

        def exists(self):
            return self.deflater is not None

    if isinstance(dest, basestring):
        dest = Dest(dest)
    assert isinstance(dest, Dest)

    from mozpack.mozjar import JarWriter, JarReader, JAR_BROTLI
    try:
        old_jar = JarReader(fileobj=dest)
    except Exception:
        # No (valid) existing jar; nothing to reuse.
        old_jar = []

    old_contents = dict([(f.filename, f) for f in old_jar])

    with JarWriter(fileobj=dest, compress=self.compress,
                   optimize=self.optimize) as jar:
        for path, file in self:
            compress = self._compress_options.get(path, self.compress)
            # Temporary: Because l10n repacks can't handle brotli just yet,
            # but need to be able to decompress those files, per
            # UnpackFinder and formatters, we force deflate on them.
            if compress == JAR_BROTLI and \
                    (isinstance(file, ManifestFile) or
                     mozpath.basename(path) == 'install.rdf'):
                compress = True

            # If the added content already comes from a jar file, we just add
            # the raw data from the original jar file to the new one.
            if isinstance(file, DeflatedFile):
                jar.add(path, file.file, mode=file.mode,
                        compress=file.file.compress)
                continue
            # If the file is already in the old contents for this jar,
            # we avoid compressing when the contents match, which requires
            # decompressing the old content. But for e.g. l10n repacks,
            # which can't decompress brotli, we skip this.
            elif path in old_contents and \
                    old_contents[path].compress != JAR_BROTLI:
                deflater = DeflaterDest(old_contents[path], compress)
            else:
                deflater = DeflaterDest(compress=compress)
            file.copy(deflater, skip_if_older)
            jar.add(path, deflater.deflater, mode=file.mode,
                    compress=compress)
        if self._preload:
            jar.preload(self._preload)
def consume_object(self, obj): self._configs.add(obj.config) if isinstance(obj, TestManifest): for test in obj.tests: self._test_manager.add(test, flavor=obj.flavor, topsrcdir=obj.topsrcdir) elif isinstance(obj, XPIDLFile): self._idl_manager.register_idl(obj) elif isinstance(obj, ConfigFileSubstitution): # Do not handle ConfigFileSubstitution for Makefiles. Leave that # to other if mozpath.basename(obj.output_path) == 'Makefile': return with self._get_preprocessor(obj) as pp: pp.do_include(obj.input_path) self.backend_input_files.add(obj.input_path) elif isinstance(obj, HeaderFileSubstitution): self._create_config_header(obj) self.backend_input_files.add(obj.input_path) # We should consider aggregating WebIDL types in emitter.py. elif isinstance(obj, WebIDLFile): self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, GeneratedEventWebIDLFile): self._webidls.generated_events_sources.add( mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, TestWebIDLFile): self._webidls.test_sources.add( mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, PreprocessedTestWebIDLFile): self._webidls.preprocessed_test_sources.add( mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, GeneratedWebIDLFile): self._webidls.generated_sources.add( mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, PreprocessedWebIDLFile): self._webidls.preprocessed_sources.add( mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, ExampleWebIDLInterface): self._webidls.example_interfaces.add(obj.name) elif isinstance(obj, IPDLFile): self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename)) elif isinstance(obj, UnifiedSources): if obj.have_unified_mapping: self._write_unified_files(obj.unified_source_mapping, obj.objdir) if hasattr(self, '_process_unified_sources'): self._process_unified_sources(obj) else: return obj.ack()
def _consume_jar_manifest(self, obj, defines):
    """Translate a jar.mn manifest into install-manifest entries.

    ``obj`` is the JARManifest object; ``defines`` are preprocessor
    definitions applied on top of the environment's defines.  Populates
    self._install_manifests, self._dependencies and
    self._manifest_entries as side effects.
    """
    # Ideally, this would all be handled somehow in the emitter, but
    # this would require all the magic surrounding l10n and addons in
    # the recursive make backend to die, which is not going to happen
    # any time soon enough.
    # Notably missing:
    # - DEFINES from config/config.mk
    # - L10n support
    # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
    #   moz.build, but it doesn't matter in dist/bin.
    pp = Preprocessor()
    pp.context.update(defines)
    pp.context.update(self.environment.defines)
    pp.context.update(
        AB_CD='en-US',
        BUILD_FASTER=1,
    )
    pp.out = JarManifestParser()
    pp.do_include(obj.path)

    for jarinfo in pp.out:
        install_target = obj.install_target
        if jarinfo.base:
            install_target = mozpath.join(install_target, jarinfo.base)
        for e in jarinfo.entries:
            # Resolve the source path of the entry: locale entries come
            # from an en-US subtree; '/'-prefixed entries are
            # topsrcdir-relative; others are relative to the jar.mn.
            if e.is_locale:
                src = mozpath.join(
                    jarinfo.relativesrcdir or mozpath.dirname(obj.path),
                    'en-US',
                    e.source)
            elif e.source.startswith('/'):
                src = mozpath.join(self.environment.topsrcdir,
                                   e.source[1:])
            else:
                src = mozpath.join(mozpath.dirname(obj.path), e.source)

            if '*' in e.source:
                if e.preprocess:
                    raise Exception('%s: Wildcards are not supported with '
                                    'preprocessing' % obj.path)

                # Longest leading path with no wildcard component; the
                # remainder becomes the glob pattern.
                def _prefix(s):
                    for p in s.split('/'):
                        if '*' not in p:
                            yield p + '/'
                prefix = ''.join(_prefix(src))

                self._install_manifests[install_target] \
                    .add_pattern_symlink(
                        prefix,
                        src[len(prefix):],
                        mozpath.join(jarinfo.name, e.output))
                continue

            if not os.path.exists(src):
                if e.is_locale:
                    raise Exception('%s: Cannot find %s'
                                    % (obj.path, e.source))
                # Missing in the srcdir: assume it is generated in the
                # objdir and record a build dependency on it.
                if e.source.startswith('/'):
                    src = mozpath.join(self.environment.topobjdir,
                                       e.source[1:])
                else:
                    # This actually gets awkward if the jar.mn is not
                    # in the same directory as the moz.build declaring
                    # it, but it's how it works in the recursive make,
                    # not that anything relies on that, but it's simpler.
                    src = mozpath.join(obj.objdir, e.source)
                self._dependencies['install-%s' % install_target] \
                    .append(mozpath.relpath(
                        src, self.environment.topobjdir))

            if e.preprocess:
                kwargs = {}
                # CSS files use '%' as the preprocessor marker since '#'
                # is valid CSS syntax.
                if src.endswith('.css'):
                    kwargs['marker'] = '%'
                self._add_preprocess(
                    obj,
                    src,
                    mozpath.join(jarinfo.name, mozpath.dirname(e.output)),
                    mozpath.basename(e.output),
                    defines=defines,
                    **kwargs)
            else:
                self._install_manifests[install_target].add_symlink(
                    src,
                    mozpath.join(jarinfo.name, e.output))

        # Emit the per-jar chrome manifest, expanding '%' to the jar
        # directory name.
        manifest = mozpath.normpath(
            mozpath.join(install_target, jarinfo.name))
        manifest += '.manifest'
        for m in jarinfo.chrome_manifests:
            self._manifest_entries[manifest].append(
                m.replace('%', mozpath.basename(jarinfo.name) + '/'))

        # Register the per-jar manifest in the parent chrome.manifest,
        # except for the top-level 'chrome' jar.
        if jarinfo.name != 'chrome':
            manifest = mozpath.normpath(
                mozpath.join(install_target, 'chrome.manifest'))
            entry = 'manifest %s.manifest' % jarinfo.name
            if entry not in self._manifest_entries[manifest]:
                self._manifest_entries[manifest].append(entry)
def add_idls(self, idls):
    """Register IDL files for processing.

    ``idls`` is an iterable of objects exposing a ``full_path``
    attribute (the absolute path to the .idl file).  Updates the set of
    known IDL files, their containing directories, and their stems
    (basename without extension).
    """
    self.idl_files.update(idl.full_path for idl in idls)
    self.directories.update(
        mozpath.dirname(idl.full_path) for idl in idls)
    # Bug fix: the stem must come from the IDL's path string; the
    # original passed the IDL object itself to mozpath.basename(),
    # unlike the two updates above which use idl.full_path.
    self._stems.update(
        mozpath.splitext(mozpath.basename(idl.full_path))[0]
        for idl in idls)
def _process_test_manifest(self, context, info, manifest_path):
    """Emit a TestManifest object for one test manifest file.

    ``info`` is a 4-tuple (flavor, install_root, install_subdir,
    package_tests) describing how tests from this manifest are
    installed.  Yields a single TestManifest object; raises
    SandboxValidationError on malformed manifests.
    """
    flavor, install_root, install_subdir, package_tests = info

    manifest_path = mozpath.normpath(manifest_path)
    path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
    manifest_dir = mozpath.dirname(path)
    manifest_reldir = mozpath.dirname(
        mozpath.relpath(path, context.config.topsrcdir))
    install_prefix = mozpath.join(install_root, install_subdir)

    try:
        m = manifestparser.TestManifest(manifests=[path], strict=True)
        defaults = m.manifest_defaults[os.path.normpath(path)]
        # A manifest with neither tests nor DEFAULT support-files is
        # useless and likely a mistake.
        if not m.tests and not 'support-files' in defaults:
            raise SandboxValidationError('Empty test manifest: %s'
                % path, context)

        obj = TestManifest(context, path, m, flavor=flavor,
            install_prefix=install_prefix,
            relpath=mozpath.join(manifest_reldir,
                mozpath.basename(path)),
            dupe_manifest='dupe-manifest' in defaults)

        filtered = m.tests

        # Jetpack add-on tests are expected to be generated during the
        # build process so they won't exist here.
        if flavor != 'jetpack-addon':
            missing = [t['name'] for t in filtered
                       if not os.path.exists(t['path'])]
            if missing:
                raise SandboxValidationError('Test manifest (%s) lists '
                    'test that does not exist: %s' % (
                    path, ', '.join(missing)), context)

        out_dir = mozpath.join(install_prefix, manifest_reldir)
        if 'install-to-subdir' in defaults:
            # This is terrible, but what are you going to do?
            out_dir = mozpath.join(out_dir,
                defaults['install-to-subdir'])
            obj.manifest_obj_relpath = mozpath.join(manifest_reldir,
                defaults['install-to-subdir'],
                mozpath.basename(path))

        # "head" and "tail" lists.
        # All manifests support support-files.
        #
        # Keep a set of already seen support file patterns, because
        # repeatedly processing the patterns from the default section
        # for every test is quite costly (see bug 922517).
        extras = (('head', set()),
                  ('tail', set()),
                  ('support-files', set()))

        def process_support_files(test):
            # Record head/tail/support-files installs for one test (or
            # for the DEFAULT section when there are no tests).
            for thing, seen in extras:
                value = test.get(thing, '')
                if value in seen:
                    continue
                seen.add(value)
                for pattern in value.split():
                    # We only support globbing on support-files because
                    # the harness doesn't support * for head and tail.
                    if '*' in pattern and thing == 'support-files':
                        obj.pattern_installs.append(
                            (manifest_dir, pattern, out_dir))
                    # "absolute" paths identify files that are to be
                    # placed in the install_root directory (no globs)
                    elif pattern[0] == '/':
                        full = mozpath.normpath(mozpath.join(
                            manifest_dir, mozpath.basename(pattern)))
                        obj.installs[full] = (mozpath.join(
                            install_root, pattern[1:]), False)
                    else:
                        full = mozpath.normpath(mozpath.join(
                            manifest_dir, pattern))
                        dest_path = mozpath.join(out_dir, pattern)

                        # If the path resolves to a different directory
                        # tree, we take special behavior depending on the
                        # entry type.
                        if not full.startswith(manifest_dir):
                            # If it's a support file, we install the file
                            # into the current destination directory.
                            # This implementation makes installing things
                            # with custom prefixes impossible. If this is
                            # needed, we can add support for that via a
                            # special syntax later.
                            if thing == 'support-files':
                                dest_path = mozpath.join(out_dir,
                                    os.path.basename(pattern))
                            # If it's not a support file, we ignore it.
                            # This preserves old behavior so things like
                            # head files doesn't get installed multiple
                            # times.
                            else:
                                continue
                        obj.installs[full] = (
                            mozpath.normpath(dest_path), False)

        for test in filtered:
            obj.tests.append(test)

            # Some test files are compiled and should not be copied into the
            # test package. They function as identifiers rather than files.
            if package_tests:
                obj.installs[mozpath.normpath(test['path'])] = \
                    (mozpath.join(out_dir, test['relpath']), True)

            process_support_files(test)

        if not filtered:
            # If there are no tests, look for support-files under DEFAULT.
            process_support_files(defaults)

        # We also copy manifests into the output directory,
        # including manifests from [include:foo] directives.
        for mpath in m.manifests():
            mpath = mozpath.normpath(mpath)
            out_path = mozpath.join(out_dir, mozpath.basename(mpath))
            obj.installs[mpath] = (out_path, False)

        # Some manifests reference files that are auto generated as
        # part of the build or shouldn't be installed for some
        # reason. Here, we prune those files from the install set.
        # FUTURE we should be able to detect autogenerated files from
        # other build metadata. Once we do that, we can get rid of this.
        for f in defaults.get('generated-files', '').split():
            # We re-raise otherwise the stack trace isn't informative.
            try:
                del obj.installs[mozpath.join(manifest_dir, f)]
            except KeyError:
                raise SandboxValidationError('Error processing test '
                    'manifest %s: entry in generated-files not present '
                    'elsewhere in manifest: %s' % (path, f), context)

            obj.external_installs.add(mozpath.join(out_dir, f))

        yield obj
    # NOTE(review): (AssertionError, Exception) is redundant --
    # AssertionError already subclasses Exception -- and this wrapper
    # also re-wraps SandboxValidationErrors raised above, nesting their
    # messages.  Left as-is to preserve behavior.
    except (AssertionError, Exception):
        raise SandboxValidationError('Error processing test '
            'manifest file %s: %s' % (path,
                '\n'.join(traceback.format_exception(*sys.exc_info()))),
            context)
def _consume_jar_manifest(self, obj):
    """Process a jar.mn manifest by re-emitting FinalTargetFiles-style
    objects (plus ChromeManifestEntry objects) back into consume_object.

    Each jar section gets its own synthesized Context whose
    FINAL_TARGET points at the jar's install target.
    """
    # Ideally, this would all be handled somehow in the emitter, but
    # this would require all the magic surrounding l10n and addons in
    # the recursive make backend to die, which is not going to happen
    # any time soon enough.
    # Notably missing:
    # - DEFINES from config/config.mk
    # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
    #   moz.build, but it doesn't matter in dist/bin.
    pp = Preprocessor()
    if obj.defines:
        pp.context.update(obj.defines.defines)
    pp.context.update(self.environment.defines)
    ab_cd = obj.config.substs['MOZ_UI_LOCALE'][0]
    pp.context.update(AB_CD=ab_cd, )
    pp.out = JarManifestParser()
    try:
        pp.do_include(obj.path.full_path)
    except DeprecatedJarManifest as e:
        # Re-raise with the offending manifest path for context.
        raise DeprecatedJarManifest(
            'Parsing error while processing %s: %s'
            % (obj.path.full_path, e.message))
    self.backend_input_files |= pp.includes

    for jarinfo in pp.out:
        # Synthesize a context rooted at the jar.mn so the re-emitted
        # objects resolve paths relative to it.
        jar_context = Context(allowed_variables=VARIABLES,
                              config=obj._context.config)
        jar_context.push_source(obj._context.main_path)
        jar_context.push_source(obj.path.full_path)

        install_target = obj.install_target
        if jarinfo.base:
            install_target = mozpath.normpath(
                mozpath.join(install_target, jarinfo.base))
        jar_context['FINAL_TARGET'] = install_target
        if obj.defines:
            jar_context['DEFINES'] = obj.defines.defines
        files = jar_context['FINAL_TARGET_FILES']
        files_pp = jar_context['FINAL_TARGET_PP_FILES']
        localized_files = jar_context['LOCALIZED_FILES']
        localized_files_pp = jar_context['LOCALIZED_PP_FILES']

        for e in jarinfo.entries:
            if e.is_locale:
                # Locale entries resolve under an en-US subtree of the
                # relative srcdir (or the jar.mn's own directory).
                if jarinfo.relativesrcdir:
                    src = '/%s' % jarinfo.relativesrcdir
                else:
                    src = ''
                src = mozpath.join(src, 'en-US', e.source)
            else:
                src = e.source

            src = Path(jar_context, src)

            if '*' not in e.source and not os.path.exists(src.full_path):
                if e.is_locale:
                    raise Exception(
                        '%s: Cannot find %s (tried %s)'
                        % (obj.path, e.source, src.full_path))
                # Not in the srcdir: treat as an objdir-generated file
                # ('!' prefix).
                if e.source.startswith('/'):
                    src = Path(jar_context, '!' + e.source)
                else:
                    # This actually gets awkward if the jar.mn is not
                    # in the same directory as the moz.build declaring
                    # it, but it's how it works in the recursive make,
                    # not that anything relies on that, but it's simpler.
                    src = Path(obj._context, '!' + e.source)

            output_basename = mozpath.basename(e.output)
            if output_basename != src.target_basename:
                src = RenamedSourcePath(jar_context,
                                        (src, output_basename))
            path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

            if e.preprocess:
                if '*' in e.source:
                    raise Exception('%s: Wildcards are not supported with '
                                    'preprocessing' % obj.path)
                if e.is_locale:
                    localized_files_pp[path] += [src]
                else:
                    files_pp[path] += [src]
            else:
                if e.is_locale:
                    localized_files[path] += [src]
                else:
                    files[path] += [src]

        # Feed the collected file lists back through consume_object as
        # ordinary (Localized/Preprocessed) FinalTargetFiles objects.
        if files:
            self.consume_object(FinalTargetFiles(jar_context, files))
        if files_pp:
            self.consume_object(
                FinalTargetPreprocessedFiles(jar_context, files_pp))
        if localized_files:
            self.consume_object(
                LocalizedFiles(jar_context, localized_files))
        if localized_files_pp:
            self.consume_object(
                LocalizedPreprocessedFiles(jar_context,
                                           localized_files_pp))

        for m in jarinfo.chrome_manifests:
            entry = parse_manifest_line(
                mozpath.dirname(jarinfo.name),
                m.replace('%', mozpath.basename(jarinfo.name) + '/'))
            self.consume_object(ChromeManifestEntry(
                jar_context, '%s.manifest' % jarinfo.name, entry))
def all_non_static_basenames(self):
    """Return the basenames of every non-static source file."""
    return list(map(mozpath.basename, self.all_non_static_sources()))
def _process_final_target_files(self, obj):
    """Emit symlink rules installing final-target files.

    Handles srcdir-relative files (including wildcard patterns) and
    objdir-generated files, routing each to the appropriate backend
    file.
    """
    target = obj.install_target
    if not isinstance(obj, ObjdirFiles):
        # Only a known set of install roots is supported.
        path = mozpath.basedir(target, (
            'dist/bin',
            'dist/xpi-stage',
            '_tests',
            'dist/include',
            'dist/sdk',
        ))
        if not path:
            raise Exception("Cannot install to " + target)

    for path, files in obj.files.walk():
        self._add_features(target, path)
        for f in files:
            # Files matching the compile-environment patterns belong to
            # the installed-files output group.
            output_group = None
            if any(mozpath.match(mozpath.basename(f), p)
                   for p in self._compile_env_files):
                output_group = self._installed_files

            if not isinstance(f, ObjDirPath):
                backend_file = self._get_backend_file(
                    mozpath.join(target, path))
                if '*' in f:
                    if f.startswith('/') or isinstance(f, AbsolutePath):
                        basepath, wild = os.path.split(f.full_path)
                        if '*' in basepath:
                            raise Exception(
                                "Wildcards are only supported in the filename part of "
                                "srcdir-relative or absolute paths.")

                        # TODO: This is only needed for Windows, so we can
                        # skip this for now.
                        pass
                    else:
                        # Longest wildcard-free leading path; the rest is
                        # the glob pattern handed to FileFinder.
                        def _prefix(s):
                            for p in mozpath.split(s):
                                if '*' not in p:
                                    yield p + '/'
                        prefix = ''.join(_prefix(f.full_path))
                        self.backend_input_files.add(prefix)

                        output_dir = ''
                        # If we have a RenamedSourcePath here, the common backend
                        # has generated this object from a jar manifest, and we
                        # can rely on 'path' to be our destination path relative
                        # to any wildcard match. Otherwise, the output file may
                        # contribute to our destination directory.
                        if not isinstance(f, RenamedSourcePath):
                            output_dir = ''.join(
                                _prefix(mozpath.dirname(f)))

                        finder = FileFinder(prefix)
                        for p, _ in finder.find(
                                f.full_path[len(prefix):]):
                            # NOTE(review): install_dir is computed but
                            # never used in this loop.
                            install_dir = prefix[len(obj.srcdir) + 1:]
                            output = p
                            if f.target_basename and \
                                    '*' not in f.target_basename:
                                output = mozpath.join(
                                    f.target_basename, output)
                            backend_file.symlink_rule(
                                mozpath.join(prefix, p),
                                output=mozpath.join(output_dir, output),
                                output_group=output_group)
                else:
                    backend_file.symlink_rule(
                        f.full_path,
                        output=f.target_basename,
                        output_group=output_group)
            else:
                if (self.environment.is_artifact_build and
                        any(mozpath.match(f.target_basename, p)
                            for p in self._compile_env_gen_files)):
                    # If we have an artifact build we never would have generated this file,
                    # so do not attempt to install it.
                    continue

                # We're not generating files in these directories yet, so
                # don't attempt to install files generated from them.
                if f.context.relobjdir not in ('toolkit/library',
                                               'js/src/shell'):
                    output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                          f.target_basename)
                    gen_backend_file = self._get_backend_file(
                        f.context.relobjdir)
                    # Delay the install rule if the generating backend
                    # file requires it for this input.
                    if gen_backend_file.requires_delay([f]):
                        gen_backend_file.delayed_installed_files.append(
                            (f.full_path, output, output_group))
                    else:
                        gen_backend_file.symlink_rule(
                            f.full_path,
                            output=output,
                            output_group=output_group)
def _link_libraries(self, context, obj, variable):
    """Add linkage declarations to a given object.

    For each library named in ``context[variable]`` (optionally
    ``static:``-prefixed, optionally with a directory component),
    resolve a unique matching library in the tree and link ``obj``
    against it.  Raises SandboxValidationError when the reference is
    ambiguous or cannot be resolved.  Also links system libraries from
    the corresponding OS_LIBS/HOST_OS_LIBS variable.
    """
    assert isinstance(obj, Linkable)

    for path in context.get(variable, []):
        # 'static:' forces a static library, but only for target-kind
        # linkables.
        force_static = path.startswith('static:') and obj.KIND == 'target'
        if force_static:
            path = path[7:]
        name = mozpath.basename(path)
        dir = mozpath.dirname(path)  # NOTE: shadows builtins.dir
        candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
        if dir:
            # Normalize the directory to a topobjdir-relative path;
            # '/'-prefixed means topobjdir-relative, otherwise relative
            # to the linkable's objdir.
            if dir.startswith('/'):
                dir = mozpath.normpath(
                    mozpath.join(obj.topobjdir, dir[1:]))
            else:
                dir = mozpath.normpath(
                    mozpath.join(obj.objdir, dir))
            dir = mozpath.relpath(dir, obj.topobjdir)
            candidates = [l for l in candidates if l.relobjdir == dir]
            if not candidates:
                # If the given directory is under one of the external
                # (third party) paths, use a fake library reference to
                # there.
                for d in self._external_paths:
                    if dir.startswith('%s/' % d):
                        candidates = [
                            self._get_external_library(dir, name,
                                                       force_static)]
                        break

            if not candidates:
                raise SandboxValidationError(
                    '%s contains "%s", but there is no "%s" %s in %s.'
                    % (variable, path, name,
                       self.LIBRARY_NAME_VAR[obj.KIND], dir), context)

        if len(candidates) > 1:
            # If there's more than one remaining candidate, it could be
            # that there are instances for the same library, in static and
            # shared form.
            libs = {}
            for l in candidates:
                key = mozpath.join(l.relobjdir, l.basename)
                if force_static:
                    if isinstance(l, StaticLibrary):
                        libs[key] = l
                else:
                    # Prefer shared over static for the same key.
                    if key in libs and isinstance(l, SharedLibrary):
                        libs[key] = l
                    if key not in libs:
                        libs[key] = l
            # Fix: materialize as a list -- dict.values() is a view in
            # Python 3 and does not support the candidates[0] indexing
            # performed below.
            candidates = list(libs.values())
            if force_static and not candidates:
                if dir:
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is no static '
                        '"%s" %s in %s.'
                        % (variable, path, name,
                           self.LIBRARY_NAME_VAR[obj.KIND], dir), context)
                raise SandboxValidationError(
                    '%s contains "static:%s", but there is no static "%s" '
                    '%s in the tree' % (variable, name, name,
                    self.LIBRARY_NAME_VAR[obj.KIND]), context)

        if not candidates:
            raise SandboxValidationError(
                '%s contains "%s", which does not match any %s in the tree.'
                % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
                context)

        elif len(candidates) > 1:
            paths = (mozpath.join(l.relativedir, 'moz.build')
                     for l in candidates)
            raise SandboxValidationError(
                '%s contains "%s", which matches a %s defined in multiple '
                'places:\n    %s' % (variable, path,
                self.LIBRARY_NAME_VAR[obj.KIND],
                '\n    '.join(paths)), context)

        elif force_static and not isinstance(candidates[0], StaticLibrary):
            raise SandboxValidationError(
                '%s contains "static:%s", but there is only a shared "%s" '
                'in %s. You may want to add FORCE_STATIC_LIB=True in '
                '%s/moz.build, or remove "static:".' % (variable, path,
                name, candidates[0].relobjdir, candidates[0].relobjdir),
                context)

        elif isinstance(obj, StaticLibrary) and isinstance(candidates[0],
                SharedLibrary):
            # Track static-links-shared so it can be diagnosed later.
            self._static_linking_shared.add(obj)
        obj.link_library(candidates[0])

    # Link system libraries from OS_LIBS/HOST_OS_LIBS.
    for lib in context.get(variable.replace('USE', 'OS'), []):
        obj.link_system_library(lib)
def all_regular_bindinggen_stems(self):
    """Yield the stems of all regular binding codegen outputs."""
    # Each regular WebIDL stem produces a "<stem>Binding" file.
    for regular_stem in self.all_regular_stems():
        yield '%sBinding' % regular_stem
    # Generated-event sources contribute their bare file stems.
    for event_source in self.generated_events_sources:
        base = mozpath.basename(event_source)
        yield mozpath.splitext(base)[0]
def __init__(self, context, source, module):
    """Initialize from a moz.build context, a source path and the
    owning module name."""
    ContextDerived.__init__(self, context)

    self.module = module
    # Keep both the full source path and its basename handy.
    self.source_path = source
    self.basename = mozpath.basename(source)
def consume_object(self, obj):
    """Consume a tree-metadata object for the faster-make backend.

    Only dist/bin-targeted objects are handled; everything else is
    acknowledged and ignored.  Returns True in all cases.
    """
    # Look up the Defines previously recorded for this objdir (see the
    # Defines branch below); they feed the preprocessor calls.
    if not isinstance(obj, Defines) and isinstance(obj, ContextDerived):
        defines = self._defines.get(obj.objdir, {})
        if defines:
            defines = defines.defines

    if isinstance(obj, Defines):
        self._defines[obj.objdir] = obj

        # We're assuming below that Defines come first for a given objdir,
        # which is kind of set in stone from the order things are treated
        # in emitter.py.
        assert obj.objdir not in self._seen_directories

    elif isinstance(obj, JARManifest) and \
            obj.install_target.startswith('dist/bin'):
        self._consume_jar_manifest(obj, defines)

    elif isinstance(obj, VariablePassthru) and \
            obj.install_target.startswith('dist/bin'):
        for f in obj.variables.get('EXTRA_COMPONENTS', {}):
            # NOTE(review): 'path' is computed but never used below.
            path = mozpath.join(obj.install_target, 'components',
                                mozpath.basename(f))
            self._install_manifests[obj.install_target].add_symlink(
                mozpath.join(obj.srcdir, f),
                mozpath.join('components', mozpath.basename(f)))

            # Component manifests must be registered in chrome.manifest.
            if f.endswith('.manifest'):
                manifest = mozpath.join(obj.install_target,
                                        'chrome.manifest')
                self._manifest_entries[manifest].append(
                    'manifest components/%s' % mozpath.basename(f))

        for f in obj.variables.get('EXTRA_PP_COMPONENTS', {}):
            self._add_preprocess(obj, f, 'components', defines=defines)

            if f.endswith('.manifest'):
                manifest = mozpath.join(obj.install_target,
                                        'chrome.manifest')
                self._manifest_entries[manifest].append(
                    'manifest components/%s' % mozpath.basename(f))

    elif isinstance(obj, JavaScriptModules) and \
            obj.install_target.startswith('dist/bin'):
        for path, strings in obj.modules.walk():
            base = mozpath.join('modules', path)
            for f in strings:
                if obj.flavor == 'extra':
                    self._install_manifests[
                        obj.install_target].add_symlink(
                            mozpath.join(obj.srcdir, f),
                            mozpath.join(base, mozpath.basename(f)))
                elif obj.flavor == 'extra_pp':
                    self._add_preprocess(obj, f, base, defines=defines)

    elif isinstance(obj, JsPreferenceFile) and \
            obj.install_target.startswith('dist/bin'):
        # The condition for the directory value in config/rules.mk is:
        # ifneq (,$(DIST_SUBDIR)$(XPI_NAME))
        # - when XPI_NAME is set, obj.install_target will start with
        # dist/xpi-stage
        # - when DIST_SUBDIR is set, obj.install_target will start with
        # dist/bin/$(DIST_SUBDIR)
        # So an equivalent condition that is not cumbersome for us and that
        # is enough at least for now is checking if obj.install_target is
        # different from dist/bin.
        if obj.install_target == 'dist/bin':
            pref_dir = 'defaults/pref'
        else:
            pref_dir = 'defaults/preferences'

        # NOTE(review): 'dest' is computed but never used below.
        dest = mozpath.join(obj.install_target, pref_dir,
                            mozpath.basename(obj.path))

        # We preprocess these, but they don't necessarily have preprocessor
        # directives, so tell the preprocessor to not complain about that.
        self._add_preprocess(obj, obj.path, pref_dir, defines=defines,
                             silence_missing_directive_warnings=True)

    elif isinstance(obj, Resources) and \
            obj.install_target.startswith('dist/bin'):
        for path, strings in obj.resources.walk():
            base = mozpath.join('res', path)
            for f in strings:
                flags = strings.flags_for(f)
                if flags and flags.preprocess:
                    # '%' marker because these are typically CSS-like.
                    self._add_preprocess(obj, f, base, marker='%',
                                         defines=obj.defines)
                else:
                    self._install_manifests[
                        obj.install_target].add_symlink(
                            mozpath.join(obj.srcdir, f),
                            mozpath.join(base, mozpath.basename(f)))

    elif isinstance(obj, FinalTargetFiles) and \
            obj.install_target.startswith('dist/bin'):
        for path, strings in obj.files.walk():
            base = mozpath.join(obj.install_target, path)
            for f in strings:
                self._install_manifests[obj.install_target].add_symlink(
                    mozpath.join(obj.srcdir, f),
                    mozpath.join(path, mozpath.basename(f)))

    elif isinstance(obj, DistFiles) and \
            obj.install_target.startswith('dist/bin'):
        # We preprocess these, but they don't necessarily have preprocessor
        # directives, so tell the preprocessor to not complain about that.
        for f in obj.files:
            self._add_preprocess(obj, f, '', defines=defines,
                                 silence_missing_directive_warnings=True)

    else:
        # We currently ignore a lot of object types, so just acknowledge
        # everything.
        return True

    self._seen_directories.add(obj.objdir)
    return True
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp
    configuration file, output is the base path under which the objdir
    for the various gyp dependencies will be, and vars a dict of
    variables to pass to the gyp processor.

    Note: non_unified_sources uses a (shared) mutable default; it is
    only read here, never mutated.
    """
    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(mozpath.dirname(path)),
            params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = \
            gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(mozpath.join(
                mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            # Partition: .h skipped; .S and non-unified go to SOURCES;
            # everything else is eligible for unification.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext != '.S' and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = \
                alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(
                        mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(
                        mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                # Route the flags to the per-language flag variables
                # according to which source extensions are present.
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    context[var].extend(flags)
        else:
            # Ignore other types than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        context['GENERATED_INCLUDES'] += ['/ipc/ipdl/_ipdlheaders']
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        yield context
def _process_test_manifest(self, sandbox, info, manifest_path):
    """Emit a TestManifest object for one test manifest (sandbox-era
    variant).

    ``info`` is a 3-tuple (flavor, install_prefix, filter_inactive).
    Yields a single TestManifest object; raises SandboxValidationError
    on malformed manifests.
    """
    flavor, install_prefix, filter_inactive = info

    manifest_path = mozpath.normpath(manifest_path)
    path = mozpath.normpath(mozpath.join(sandbox['SRCDIR'], manifest_path))
    manifest_dir = mozpath.dirname(path)
    manifest_reldir = mozpath.dirname(mozpath.relpath(path,
        sandbox['TOPSRCDIR']))

    try:
        m = manifestparser.TestManifest(manifests=[path], strict=True)

        if not m.tests:
            raise SandboxValidationError('Empty test manifest: %s' % path)

        obj = TestManifest(sandbox, path, m, flavor=flavor,
            install_prefix=install_prefix,
            relpath=mozpath.join(manifest_reldir,
                mozpath.basename(path)),
            dupe_manifest='dupe-manifest' in m.tests[0])

        filtered = m.tests
        if filter_inactive:
            # Drop tests disabled for the current mozinfo configuration.
            filtered = m.active_tests(disabled=False, **self.mozinfo)

        out_dir = mozpath.join(install_prefix, manifest_reldir)

        # "head" and "tail" lists.
        # All manifests support support-files.
        #
        # Keep a set of already seen support file patterns, because
        # repeatedly processing the patterns from the default section
        # for every test is quite costly (see bug 922517).
        extras = (('head', set()),
                  ('tail', set()),
                  ('support-files', set()))

        for test in filtered:
            obj.tests.append(test)

            obj.installs[mozpath.normpath(test['path'])] = \
                mozpath.join(out_dir, test['relpath'])

            for thing, seen in extras:
                value = test.get(thing, '')
                if value in seen:
                    continue
                seen.add(value)
                for pattern in value.split():
                    # We only support globbing on support-files because
                    # the harness doesn't support * for head and tail.
                    if '*' in pattern and thing == 'support-files':
                        obj.pattern_installs.append(
                            (manifest_dir, pattern, out_dir))
                    else:
                        full = mozpath.normpath(mozpath.join(manifest_dir,
                            pattern))
                        # Only install paths in our directory. This
                        # rule is somewhat arbitrary and could be lifted.
                        if not full.startswith(manifest_dir):
                            continue

                        obj.installs[full] = mozpath.join(out_dir, pattern)

        # We also copy the manifest into the output directory.
        out_path = mozpath.join(out_dir, mozpath.basename(manifest_path))
        obj.installs[path] = out_path

        # Some manifests reference files that are auto generated as
        # part of the build or shouldn't be installed for some
        # reason. Here, we prune those files from the install set.
        # FUTURE we should be able to detect autogenerated files from
        # other build metadata. Once we do that, we can get rid of this.
        for f in m.tests[0].get('generated-files', '').split():
            # We re-raise otherwise the stack trace isn't informative.
            try:
                del obj.installs[mozpath.join(manifest_dir, f)]
            except KeyError:
                raise SandboxValidationError('Error processing test '
                    'manifest %s: entry in generated-files not present '
                    'elsewhere in manifest: %s' % (path, f))

            obj.external_installs.add(mozpath.join(out_dir, f))

        yield obj
    # NOTE(review): (AssertionError, Exception) is redundant --
    # AssertionError is already an Exception subclass.  Preserved as-is.
    except (AssertionError, Exception):
        raise SandboxValidationError('Error processing test '
            'manifest file %s: %s' % (path,
                '\n'.join(traceback.format_exception(*sys.exc_info()))))