def __init__(self, topsrcdir, topobjdir, dist, group=Grouping.NO, abspaths=False):
    topsrcdir = mozpath.normsep(os.path.normcase(os.path.abspath(topsrcdir)))
    topobjdir = mozpath.normsep(os.path.normcase(os.path.abspath(topobjdir)))
    dist = mozpath.normsep(os.path.normcase(os.path.abspath(dist)))
    if abspaths:
        topsrcdir_value = topsrcdir
        topobjdir_value = topobjdir
        dist_value = dist
    else:
        topsrcdir_value = '$(topsrcdir)'
        topobjdir_value = '$(DEPTH)'
        dist_value = '$(DIST)'

    self._normpaths = {
        topsrcdir: topsrcdir_value,
        topobjdir: topobjdir_value,
        dist: dist_value,
        '$(topsrcdir)': topsrcdir_value,
        '$(DEPTH)': topobjdir_value,
        '$(DIST)': dist_value,
        '$(depth)': topobjdir_value,  # normcase may lowercase variable refs when
        '$(dist)': dist_value,        # they are in the original dependency file
        mozpath.relpath(topsrcdir, os.curdir): topsrcdir_value,
        mozpath.relpath(topobjdir, os.curdir): topobjdir_value,
        mozpath.relpath(dist, os.curdir): dist_value,
    }

    Makefile.__init__(self)
    self._group = group
    self._targets = OrderedDict()

def consume_object(self, obj):
    if isinstance(obj, JARManifest) and \
            obj.install_target.startswith('dist/bin'):
        self._consume_jar_manifest(obj)

    elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \
            obj.install_target.startswith('dist/bin'):
        defines = obj.defines or {}
        if defines:
            defines = defines.defines
        for path, files in obj.files.walk():
            for f in files:
                if isinstance(obj, FinalTargetPreprocessedFiles):
                    self._add_preprocess(obj, f.full_path, path,
                                         target=f.target_basename,
                                         defines=defines)
                elif '*' in f:
                    def _prefix(s):
                        for p in mozpath.split(s):
                            if '*' not in p:
                                yield p + '/'
                    prefix = ''.join(_prefix(f.full_path))

                    self._install_manifests[obj.install_target] \
                        .add_pattern_symlink(
                            prefix,
                            f.full_path[len(prefix):],
                            mozpath.join(path, f.target_basename))
                else:
                    self._install_manifests[obj.install_target].add_symlink(
                        f.full_path,
                        mozpath.join(path, f.target_basename)
                    )
                if isinstance(f, ObjDirPath):
                    dep_target = 'install-%s' % obj.install_target
                    self._dependencies[dep_target].append(
                        mozpath.relpath(f.full_path,
                                        self.environment.topobjdir))

    elif isinstance(obj, ChromeManifestEntry) and \
            obj.install_target.startswith('dist/bin'):
        top_level = mozpath.join(obj.install_target, 'chrome.manifest')
        if obj.path != top_level:
            entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                    obj.install_target)
            self._manifest_entries[top_level].add(entry)
        self._manifest_entries[obj.path].add(str(obj.entry))

    elif isinstance(obj, XPIDLFile):
        self._has_xpidl = True
        # We're not actually handling XPIDL files.
        return False

    else:
        return False

    return True

def _process_library_definition(self, libdef, backend_file):
    backend_file.write("LIBRARY_NAME = %s\n" % libdef.basename)
    thisobjdir = libdef.objdir
    topobjdir = libdef.topobjdir.replace(os.sep, "/")
    for objdir, basename in libdef.static_libraries:
        # If this is an external objdir (i.e., comm-central), use the other
        # directory instead of $(DEPTH).
        if objdir.startswith(topobjdir + "/"):
            relpath = "$(DEPTH)/%s" % mozpath.relpath(objdir, topobjdir)
        else:
            relpath = mozpath.relpath(objdir, thisobjdir)
        backend_file.write("SHARED_LIBRARY_LIBS += %s/$(LIB_PREFIX)%s.$(LIB_SUFFIX)\n"
                           % (relpath, basename))

def add(self, path, content):
    chrome = self._chromepath(path)
    if chrome:
        jar = chrome + '.jar'
        if not self.copier.contains(jar):
            self.copier.add(jar, Jarrer(self._compress, self._optimize))
        if not self.copier[jar].contains(mozpath.relpath(path, chrome)):
            self.copier[jar].add(mozpath.relpath(path, chrome), content)
    else:
        FlatFormatter.add(self, path, content)

def _write_file(self, path=None, fh=None):
    """Context manager to write a file.

    This is a glorified wrapper around FileAvoidWrite with integration to
    update the BackendConsumeSummary on this instance.

    Example usage:

        with self._write_file('foo.txt') as fh:
            fh.write('hello world')
    """
    if path is not None:
        assert fh is None
        fh = FileAvoidWrite(path)
    else:
        assert fh is not None

    dirname = mozpath.dirname(fh.name)
    try:
        os.makedirs(dirname)
    except OSError as error:
        if error.errno != errno.EEXIST:
            raise

    yield fh

    self._backend_output_files.add(
        mozpath.relpath(fh.name, self.environment.topobjdir))
    existed, updated = fh.close()
    if not existed:
        self.summary.created_count += 1
    elif updated:
        self.summary.updated_count += 1
    else:
        self.summary.unchanged_count += 1

def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
    manifest_path = mozpath.normpath(manifest_path)
    manifest_full_path = mozpath.normpath(mozpath.join(
        sandbox['SRCDIR'], manifest_path))
    manifest_reldir = mozpath.dirname(mozpath.relpath(manifest_full_path,
                                                      sandbox['TOPSRCDIR']))

    manifest = reftest.ReftestManifest()
    manifest.load(manifest_full_path)

    # reftest manifests don't come from manifest parser. But they are
    # similar enough that we can use the same emitted objects. Note
    # that we don't perform any installs for reftests.
    obj = TestManifest(sandbox, manifest_full_path, manifest, flavor=flavor,
                       install_prefix='%s/' % flavor,
                       relpath=mozpath.join(manifest_reldir,
                                            mozpath.basename(manifest_path)))

    for test in sorted(manifest.files):
        obj.tests.append({
            'path': test,
            'here': mozpath.dirname(test),
            'manifest': manifest_full_path,
            'name': mozpath.basename(test),
            'head': '',
            'tail': '',
            'support-files': '',
            'subsuite': '',
        })

    yield obj

def process_package_artifact(self, filename, processed_filename):
    added_entry = False
    with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
        with tarfile.open(filename) as reader:
            for f in reader:
                if not f.isfile():
                    continue

                if not any(mozpath.match(f.name, p) for p in self.package_artifact_patterns):
                    continue

                # We strip off the relative "firefox/" bit from the path,
                # but otherwise preserve it.
                destpath = mozpath.join('bin',
                                        mozpath.relpath(f.name, "firefox"))
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                writer.add(destpath.encode('utf-8'), reader.extractfile(f), mode=f.mode)
                added_entry = True

    if not added_entry:
        raise ValueError('Archive format changed! No pattern from "{patterns}" '
                         'matched an archive path.'.format(
                             patterns=LinuxArtifactJob.package_artifact_patterns))

def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
    manifest_path = mozpath.normpath(manifest_path)
    manifest_full_path = mozpath.normpath(
        mozpath.join(sandbox["SRCDIR"], manifest_path))
    manifest_reldir = mozpath.dirname(
        mozpath.relpath(manifest_full_path, sandbox["TOPSRCDIR"]))

    manifest = reftest.ReftestManifest()
    manifest.load(manifest_full_path)

    # reftest manifests don't come from manifest parser. But they are
    # similar enough that we can use the same emitted objects. Note
    # that we don't perform any installs for reftests.
    obj = TestManifest(
        sandbox,
        manifest_full_path,
        manifest,
        flavor=flavor,
        install_prefix="%s/" % flavor,
        relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
    )

    for test in sorted(manifest.files):
        obj.tests.append(
            {
                "path": test,
                "here": mozpath.dirname(test),
                "manifest": manifest_full_path,
                "name": mozpath.basename(test),
                "head": "",
                "tail": "",
                "support-files": "",
                "subsuite": "",
            }
        )

    yield obj

def all_files(self, base):
    all_files = set()
    for root, dirs, files in os.walk(base):
        for f in files:
            all_files.add(
                mozpath.join(mozpath.relpath(root, base), f))
    return all_files

def is_resource(self, path, base=None):
    '''
    Return whether the given path corresponds to a resource to be put in an
    omnijar archive.
    '''
    if base is None:
        base = self._get_base(path)
    path = mozpath.relpath(path, base)
    if any(mozpath.match(path, p.replace('*', '**'))
           for p in self._non_resources):
        return False
    path = mozpath.split(path)
    if path[0] == 'chrome':
        return len(path) == 1 or path[1] != 'icons'
    if path[0] == 'components':
        return path[-1].endswith(('.js', '.xpt'))
    if path[0] == 'res':
        return len(path) == 1 or \
            (path[1] != 'cursors' and path[1] != 'MainMenu.nib')
    if path[0] == 'defaults':
        return len(path) != 3 or \
            not (path[2] == 'channel-prefs.js' and
                 path[1] in ['pref', 'preferences'])
    return path[0] in [
        'modules',
        'greprefs.js',
        'hyphenation',
        'update.locale',
    ] or path[0] in STARTUP_CACHE_PATHS

def normalize_path(path):
    '''
    Remove any bin/ prefix.
    '''
    if mozpath.basedir(path, ['bin']) == 'bin':
        return mozpath.relpath(path, 'bin')
    return path

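# Hedged usage sketch, not part of the original source: exercises the
# normalize_path() helper above on two illustrative inputs.
assert normalize_path('bin/components/foo.js') == 'components/foo.js'
assert normalize_path('modules/foo.jsm') == 'modules/foo.jsm'
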
def process_tests_artifact(self, filename, processed_filename):
    from mozbuild.action.test_archive import OBJDIR_TEST_FILES
    added_entry = False

    with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
        reader = JarReader(filename)
        for filename, entry in reader.entries.iteritems():
            for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                if not mozpath.match(filename, pattern):
                    continue
                destpath = mozpath.relpath(filename, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                mode = entry['external_attr'] >> 16
                writer.add(destpath.encode('utf-8'), reader[filename], mode=mode)
                added_entry = True
                break

            for files_entry in OBJDIR_TEST_FILES.values():
                origin_pattern = files_entry['pattern']
                leaf_filename = filename
                if 'dest' in files_entry:
                    dest = files_entry['dest']
                    origin_pattern = mozpath.join(dest, origin_pattern)
                    leaf_filename = filename[len(dest) + 1:]
                if mozpath.match(filename, origin_pattern):
                    destpath = mozpath.join('..', files_entry['base'],
                                            leaf_filename)
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[filename],
                               mode=mode)

    if not added_entry:
        raise ValueError('Archive format changed! No pattern from "{patterns}" '
                         'matched an archive path.'.format(
                             patterns=LinuxArtifactJob.test_artifact_patterns))

def _get_files_info(self, paths):
    from mozpack.files import FileFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    finder = FileFinder(self.topsrcdir, find_executables=False)

    # Expand wildcards.
    allpaths = []
    for p in relpaths:
        if '*' not in p:
            if p not in allpaths:
                allpaths.append(p)
            continue

        for path, f in finder.find(p):
            if path not in allpaths:
                allpaths.append(path)

    reader = self._get_reader()
    return reader.files_info(allpaths)

def consume_object(self, obj):
    if isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"):
        self._consume_jar_manifest(obj)

    elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and obj.install_target.startswith(
        "dist/bin"
    ):
        defines = obj.defines or {}
        if defines:
            defines = defines.defines
        for path, files in obj.files.walk():
            for f in files:
                if isinstance(obj, FinalTargetPreprocessedFiles):
                    self._add_preprocess(obj, f.full_path, path,
                                         target=f.target_basename,
                                         defines=defines)
                elif "*" in f:

                    def _prefix(s):
                        for p in mozpath.split(s):
                            if "*" not in p:
                                yield p + "/"

                    prefix = "".join(_prefix(f.full_path))

                    self._install_manifests[obj.install_target].add_pattern_symlink(
                        prefix,
                        f.full_path[len(prefix):],
                        mozpath.join(path, f.target_basename)
                    )
                else:
                    self._install_manifests[obj.install_target].add_symlink(
                        f.full_path, mozpath.join(path, f.target_basename)
                    )
                if isinstance(f, ObjDirPath):
                    dep_target = "install-%s" % obj.install_target
                    self._dependencies[dep_target].append(
                        mozpath.relpath(f.full_path, self.environment.topobjdir))

    elif isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith("dist/bin"):
        top_level = mozpath.join(obj.install_target, "chrome.manifest")
        if obj.path != top_level:
            entry = "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
            self._manifest_entries[top_level].add(entry)
        self._manifest_entries[obj.path].add(str(obj.entry))

    elif isinstance(obj, XPIDLFile):
        self._has_xpidl = True

    # We currently ignore a lot of object types, so just acknowledge
    # everything.
    return True

def test_relpath(self):
    self.assertEqual(relpath("foo", "foo"), "")
    self.assertEqual(relpath(os.path.join("foo", "bar"), "foo/bar"), "")
    self.assertEqual(relpath(os.path.join("foo", "bar"), "foo"), "bar")
    self.assertEqual(relpath(os.path.join("foo", "bar", "baz"), "foo"), "bar/baz")
    self.assertEqual(relpath(os.path.join("foo", "bar"), "foo/bar/baz"), "..")
    self.assertEqual(relpath(os.path.join("foo", "bar"), "foo/baz"), "../bar")
    self.assertEqual(relpath("foo/", "foo"), "")
    self.assertEqual(relpath("foo/bar/", "foo"), "bar")

def add_manifest(self, entry):
    if isinstance(entry, ManifestBinaryComponent):
        formatter, base = super(OmniJarFormatter, self), ''
    else:
        formatter, base, path = self._get_formatter(entry.base,
                                                    is_resource=True)
    entry = entry.move(mozpath.relpath(entry.base, base))
    formatter.add_manifest(entry)

def contains(self, path):
    assert '*' not in path
    if self.copier.contains(path):
        return True
    for base, copier in self.omnijars.iteritems():
        if copier.contains(mozpath.relpath(path, base)):
            return True
    return False

def _get_base(self, path):
    '''
    Return the deepest base directory containing the given path, along with
    the path relative to that base (or the path unchanged when no base
    matches).
    '''
    self._frozen_bases = True
    base = mozpath.basedir(path, self._sub_formatter.keys())
    relpath = mozpath.relpath(path, base) if base else path
    return base, relpath

def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.
    """
    flavor_info = {flavor: (root, prefix, install)
                   for (flavor, root, prefix, install) in TEST_MANIFESTS.values()}
    objdir_dest = mozpath.join(topobjdir, tests_root)

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source,
                                               mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(o, root,
                                                        manifest_dir,
                                                        out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(mozpath.join(topobjdir,
                                             '_build_manifests',
                                             'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(objdir_dest, remove_unaccounted=False)

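# Hedged usage sketch, not from the original source: shows the shape of the
# dicts that install_test_files() expects in test_objs. The paths, the
# objdir, and the tests root below are hypothetical examples; 'xpcshell' is
# used as a stand-in flavor.
example_test_obj = {
    'flavor': 'xpcshell',
    'manifest': '/src/dom/indexedDB/test/unit/xpcshell.ini',
    'file_relpath': 'dom/indexedDB/test/unit/test_simple.js',
}

# Would install just that one test into the objdir tests tree:
# install_test_files('/src', '/obj', '_tests', [example_test_obj])
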
def _get_preprocessor(self, obj):
    '''Returns a preprocessor with a few predefined values depending on
    the given BaseConfigSubstitution(-like) object, and all the substs in
    the current environment.'''
    pp = Preprocessor()
    srcdir = mozpath.dirname(obj.input_path)
    pp.context.update(obj.config.substs)
    pp.context.update(
        top_srcdir=obj.topsrcdir,
        srcdir=srcdir,
        relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or '.',
        DEPTH=mozpath.relpath(obj.topobjdir,
                              mozpath.dirname(obj.output_path)) or '.',
    )
    pp.do_filter('attemptSubstitution')
    pp.setMarker(None)
    with self._write_file(obj.output_path) as fh:
        pp.out = fh
        yield pp

def contains(self, path):
    assert '*' not in path
    chrome = self._chromepath(path)
    if not chrome:
        return self.copier.contains(path)
    if not self.copier.contains(chrome + '.jar'):
        return False
    return self.copier[chrome + '.jar'] \
        .contains(mozpath.relpath(path, chrome))

def consume_object(self, obj):
    if not isinstance(obj, Defines) and isinstance(obj, ContextDerived):
        defines = self._defines.get(obj.objdir, {})
        if defines:
            defines = defines.defines

    if isinstance(obj, Defines):
        self._defines[obj.objdir] = obj

        # We're assuming below that Defines come first for a given objdir,
        # which is kind of set in stone from the order things are treated
        # in emitter.py.
        assert obj.objdir not in self._seen_directories

    elif isinstance(obj, JARManifest) and \
            obj.install_target.startswith('dist/bin'):
        self._consume_jar_manifest(obj, defines)

    elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \
            obj.install_target.startswith('dist/bin'):
        for path, files in obj.files.walk():
            for f in files:
                if isinstance(obj, FinalTargetPreprocessedFiles):
                    self._add_preprocess(obj, f.full_path, path,
                                         defines=defines)
                else:
                    self._install_manifests[obj.install_target].add_symlink(
                        f.full_path,
                        mozpath.join(path, mozpath.basename(f))
                    )

    elif isinstance(obj, ChromeManifestEntry) and \
            obj.install_target.startswith('dist/bin'):
        top_level = mozpath.join(obj.install_target, 'chrome.manifest')
        if obj.path != top_level:
            entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                    obj.install_target)
            if entry not in self._manifest_entries[top_level]:
                self._manifest_entries[top_level].append(entry)
        self._manifest_entries[obj.path].append(str(obj.entry))

    elif isinstance(obj, XPIDLFile):
        self._has_xpidl = True
        # XPIDL are emitted before Defines, which breaks the assert in the
        # branch for Defines. OTOH, we don't actually care about the
        # XPIDLFile objects just yet, so we can just pretend we didn't see
        # an object in the directory yet.
        return True

    else:
        # We currently ignore a lot of object types, so just acknowledge
        # everything.
        return True

    self._seen_directories.add(obj.objdir)
    return True

def _handle_webidl_build(self, bindings_dir, unified_source_mapping,
                         webidls, expected_build_output_files,
                         global_define_files):
    include_dir = mozpath.join(self.environment.topobjdir, 'dist',
                               'include')
    for f in expected_build_output_files:
        if f.startswith(include_dir):
            manifest, reltarget = self._get_manifest_from_target('dist/include')
            manifest.add_optional_exists(
                mozpath.join(reltarget, mozpath.relpath(f, include_dir)))

def consume_finished(self):
    mk = Makefile()
    # Add the default rule at the very beginning.
    mk.create_rule(['default'])
    mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
    mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
    mk.add_statement('BACKEND = %s' % self._backend_output_list_file)
    if not self._has_xpidl:
        mk.add_statement('NO_XPIDL = 1')

    # Add a few necessary variables inherited from configure
    for var in (
        'PYTHON',
        'ACDEFINES',
        'MOZ_BUILD_APP',
        'MOZ_WIDGET_TOOLKIT',
    ):
        mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

    install_manifests_bases = self._install_manifests.keys()

    # Add information for chrome manifest generation
    manifest_targets = []

    for target, entries in self._manifest_entries.iteritems():
        manifest_targets.append(target)
        install_target = mozpath.basedir(target, install_manifests_bases)
        self._install_manifests[install_target].add_content(
            ''.join('%s\n' % e for e in sorted(entries)),
            mozpath.relpath(target, install_target))

    # Add information for install manifests.
    mk.add_statement('INSTALL_MANIFESTS = %s'
                     % ' '.join(self._install_manifests.keys()))

    # Add dependencies we inferred:
    for target, deps in self._dependencies.iteritems():
        mk.create_rule([target]).add_dependencies(
            '$(TOPOBJDIR)/%s' % d for d in deps)

    # Add backend dependencies:
    mk.create_rule([self._backend_output_list_file]).add_dependencies(
        self.backend_input_files)

    mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

    for base, install_manifest in self._install_manifests.iteritems():
        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'install_%s' % base.replace('/', '_'))) as fh:
            install_manifest.write(fileobj=fh)

    with self._write_file(
            mozpath.join(self.environment.topobjdir, 'faster',
                         'Makefile')) as fh:
        mk.dump(fh, removal_guard=False)

def _get_preprocessor(self, obj):
    """Returns a preprocessor with a few predefined values depending on
    the given BaseConfigSubstitution(-like) object, and all the substs in
    the current environment."""
    pp = Preprocessor()
    srcdir = mozpath.dirname(obj.input_path)
    pp.context.update({k: " ".join(v) if isinstance(v, list) else v
                       for k, v in obj.config.substs.iteritems()})
    pp.context.update(
        top_srcdir=obj.topsrcdir,
        topobjdir=obj.topobjdir,
        srcdir=srcdir,
        relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or ".",
        DEPTH=mozpath.relpath(obj.topobjdir,
                              mozpath.dirname(obj.output_path)) or ".",
    )
    pp.do_filter("attemptSubstitution")
    pp.setMarker(None)
    with self._write_file(obj.output_path) as fh:
        pp.out = fh
        yield pp

def test_relpath(self):
    self.assertEqual(relpath('foo', 'foo'), '')
    self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar'), '')
    self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo'), 'bar')
    self.assertEqual(relpath(self.SEP.join(('foo', 'bar', 'baz')), 'foo'),
                     'bar/baz')
    self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/bar/baz'),
                     '..')
    self.assertEqual(relpath(self.SEP.join(('foo', 'bar')), 'foo/baz'),
                     '../bar')
    self.assertEqual(relpath('foo/', 'foo'), '')
    self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')

def test_final_target_files_wildcard(self):
    """Ensure that wildcards in FINAL_TARGET_FILES work properly."""
    env = self._consume('final-target-files-wildcard', FasterMakeBackend)
    m = InstallManifest(path=mozpath.join(env.topobjdir, 'faster',
                                          'install_dist_bin'))
    self.assertEqual(len(m), 1)

    reg = FileRegistry()
    m.populate_registry(reg)
    expected = [('foo/bar.xyz', 'bar.xyz'), ('foo/foo.xyz', 'foo.xyz')]
    actual = [(path, mozpath.relpath(f.path, env.topsrcdir))
              for (path, f) in reg]
    self.assertEqual(expected, actual)

def _get_files_info(self, paths, rev=None):
    from mozbuild.frontend.reader import default_finder
    from mozpack.files import FileFinder, MercurialRevisionFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    repo = None
    if rev:
        hg_path = os.path.join(self.topsrcdir, '.hg')
        if not os.path.exists(hg_path):
            raise InvalidPathException('a Mercurial repo is required '
                                       'when specifying a revision')

        repo = self.topsrcdir

    # We need two finders because the reader's finder operates on
    # absolute paths.
    finder = FileFinder(self.topsrcdir)
    if repo:
        reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                recognize_repo_paths=True)
    else:
        reader_finder = default_finder

    # Expand wildcards.
    # One variable is for ordering. The other for membership tests.
    # (Membership testing on a list can be slow.)
    allpaths = []
    all_paths_set = set()
    for p in relpaths:
        if '*' not in p:
            if p not in all_paths_set:
                all_paths_set.add(p)
                allpaths.append(p)
            continue

        if repo:
            raise InvalidPathException('cannot use wildcard in version control mode')

        for path, f in finder.find(p):
            if path not in all_paths_set:
                all_paths_set.add(path)
                allpaths.append(path)

    reader = self._get_reader(finder=reader_finder)
    return reader.files_info(allpaths)

def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
                         sorted_nonstatic_ipdl_sources,
                         sorted_static_ipdl_sources,
                         unified_ipdl_cppsrcs_mapping):
    # Preferably we wouldn't have to import ipdl, but we need to parse the
    # ast in order to determine the namespaces since they are used in the
    # header output paths.
    sys.path.append(mozpath.join(self.environment.topsrcdir, 'ipc', 'ipdl'))
    import ipdl

    backend_file = self._get_backend_file('ipc/ipdl')
    outheaderdir = '_ipdlheaders'
    srcdir = mozpath.join(self.environment.topsrcdir, 'ipc/ipdl')
    cmd = [
        '$(PYTHON_PATH)',
        '$(PLY_INCLUDE)',
        '%s/ipdl.py' % srcdir,
        '--sync-msg-list=%s/sync-messages.ini' % srcdir,
        '--msg-metadata=%s/message-metadata.ini' % srcdir,
        '--outheaders-dir=%s' % outheaderdir,
        '--outcpp-dir=.',
    ]
    ipdldirs = sorted(set(mozpath.dirname(p) for p in sorted_ipdl_sources))
    cmd.extend(['-I%s' % d for d in ipdldirs])
    cmd.extend(sorted_ipdl_sources)

    outputs = ['IPCMessageTypeName.cpp',
               mozpath.join(outheaderdir, 'IPCMessageStart.h'),
               'ipdl_lextab.py', 'ipdl_yacctab.py']

    for filename in sorted_ipdl_sources:
        filepath, ext = os.path.splitext(filename)
        dirname, basename = os.path.split(filepath)
        dirname = mozpath.relpath(dirname, self.environment.topsrcdir)

        extensions = ['']
        if ext == '.ipdl':
            extensions.extend(['Child', 'Parent'])

        with open(filename) as f:
            ast = ipdl.parse(f.read(), filename, includedirs=ipdldirs)
            self.backend_input_files.add(filename)
        headerdir = os.path.join(outheaderdir,
                                 *([ns.name for ns in ast.namespaces]))

        for extension in extensions:
            outputs.append("%s%s.cpp" % (basename, extension))
            outputs.append(mozpath.join(headerdir,
                                        '%s%s.h' % (basename, extension)))

    backend_file.rule(
        display='IPDL code generation',
        cmd=cmd,
        outputs=outputs,
        extra_outputs=[self._installed_files],
        check_unchanged=True,
    )
    backend_file.sources['.cpp'].extend(u[0] for u in unified_ipdl_cppsrcs_mapping)

def _get_preprocessor(self, obj):
    '''Returns a preprocessor with a few predefined values depending on
    the given BaseConfigSubstitution(-like) object, and all the substs in
    the current environment.'''
    pp = Preprocessor()
    srcdir = mozpath.dirname(obj.input_path)
    pp.context.update({
        k: ' '.join(v) if isinstance(v, list) else v
        for k, v in obj.config.substs.iteritems()
    })
    pp.context.update(
        top_srcdir=obj.topsrcdir,
        topobjdir=obj.topobjdir,
        srcdir=srcdir,
        relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or '.',
        DEPTH=mozpath.relpath(obj.topobjdir,
                              mozpath.dirname(obj.output_path)) or '.',
    )
    pp.do_filter('attemptSubstitution')
    pp.setMarker(None)
    with self._write_file(obj.output_path) as fh:
        pp.out = fh
        yield pp

def main(output, input_asm, ffi_h, ffi_config_h, defines, includes):
    defines = shlex.split(defines)
    includes = shlex.split(includes)
    # CPP uses -E which generates #line directives. -EP suppresses them.
    cpp = buildconfig.substs['CPP'] + ['-EP']
    input_asm = mozpath.relpath(input_asm, os.getcwd())
    args = cpp + defines + includes + [input_asm]
    print(' '.join(args))
    preprocessed = subprocess.check_output(args)
    r = re.compile('F[dpa][^ ]*')
    for line in preprocessed.splitlines():
        output.write(r.sub('', line))
        output.write('\n')

def _handle_final_target_files(self, obj):
    for path, files in obj.files.walk():
        for f in files:
            dest = mozpath.join(obj.install_target, path, f.target_basename)
            obj_path = mozpath.join(self.environment.topobjdir, dest)
            if obj_path.endswith('.in'):
                obj_path = obj_path[:-3]
            if isinstance(obj, FinalTargetPreprocessedFiles):
                assert os.path.exists(obj_path), '%s should exist' % obj_path
                pp_info = generate_pp_info(obj_path, obj.topsrcdir)
            else:
                pp_info = None

            self._install_mapping[dest] = \
                mozpath.relpath(f.full_path, obj.topsrcdir), pp_info

def update_sources(topsrcdir):
    print('Updating ICU sources lists...')
    sys.path.append(mozpath.join(topsrcdir, 'build/pymake'))
    for d in ['common', 'i18n']:
        base_path = mozpath.join(topsrcdir, 'intl/icu/source/%s' % d)
        makefile = mozpath.join(base_path, 'Makefile.in')
        mozbuild = mozpath.join(topsrcdir,
                                'config/external/icu/%s/sources.mozbuild' % d)
        sources = [mozpath.relpath(s, topsrcdir)
                   for s in get_sources_from_makefile(makefile)]
        headers = [mozpath.normsep(os.path.relpath(s, topsrcdir))
                   for s in list_headers(mozpath.join(base_path, 'unicode'))]
        write_sources(mozbuild, sources, headers)

def tests_by_manifest(self):
    if not self._tests_by_manifest:
        for test in self.tests:
            if test['flavor'] == "web-platform-tests":
                # Use test ids instead of paths for WPT.
                self._tests_by_manifest[test['manifest']].append(
                    test['name'])
            else:
                relpath = mozpath.relpath(
                    test['path'], mozpath.dirname(test['manifest']))
                self._tests_by_manifest[test['manifest_relpath']].append(
                    relpath)
    return self._tests_by_manifest

def tests_by_manifest(self):
    if not self._tests_by_manifest:
        for test in self.tests:
            if test["flavor"] == "web-platform-tests":
                # Use test ids instead of paths for WPT.
                self._tests_by_manifest[test["manifest"]].append(
                    test["name"])
            else:
                relpath = mozpath.relpath(
                    test["path"], mozpath.dirname(test["manifest"]))
                self._tests_by_manifest[test["manifest_relpath"]].append(
                    relpath)
    return self._tests_by_manifest

def make_base_task(config, name, job, script, command):
    """Common config for thirdparty build tasks."""
    if config.params['level'] == '3':
        expires = '1 year'
    else:
        expires = '28 days'

    # To be consistent with how files-changed is used elsewhere, the
    # path must be relative to GECKO.
    script_rel = mozpath.relpath(script, GECKO)

    return {
        'attributes': {},
        'name': name,
        'description': job['description'],
        'expires-after': expires,
        'label': 'thirdparty-%s' % name,
        'run-on-projects': [],
        'index': {
            'job-name': name,
        },
        'treeherder': {
            'kind': 'build',
            'tier': 1,
        },
        'run': {
            'using': 'run-task',
            'checkout': True,
            'comm-checkout': True,
            'command': command,
            'sparse-profile': 'toolchain-build',
        },
        'worker-type': 'b-linux',
        'worker': {
            'chain-of-trust': True,
            'env': {
                'WORKSPACE': '/builds/worker/workspace',
                'UPLOAD_DIR': '/builds/worker/artifacts',
            },
            'max-run-time': 900,
        },
        'fetches': {},
        'when': {
            'files-changed': [
                script_rel,
                config.path,
            ],
        },
    }

def _get_files_info(self, paths, rev=None):
    from mozbuild.frontend.reader import default_finder
    from mozpack.files import FileFinder, MercurialRevisionFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    repo = None
    if rev:
        hg_path = os.path.join(self.topsrcdir, '.hg')
        if not os.path.exists(hg_path):
            raise InvalidPathException('a Mercurial repo is required '
                                       'when specifying a revision')

        repo = self.topsrcdir

    # We need two finders because the reader's finder operates on
    # absolute paths.
    finder = FileFinder(self.topsrcdir, find_executables=False)
    if repo:
        reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                recognize_repo_paths=True)
    else:
        reader_finder = default_finder

    # Expand wildcards.
    allpaths = []
    for p in relpaths:
        if '*' not in p:
            if p not in allpaths:
                allpaths.append(p)
            continue

        if repo:
            raise InvalidPathException(
                'cannot use wildcard in version control mode')

        for path, f in finder.find(p):
            if path not in allpaths:
                allpaths.append(path)

    reader = self._get_reader(finder=reader_finder)
    return reader.files_info(allpaths)

def consume_object(self, obj):
    """Write out build files necessary to build with tup."""

    if not isinstance(obj, ContextDerived):
        return False

    consumed = CommonBackend.consume_object(self, obj)
    if consumed:
        return True

    backend_file = self._get_backend_file_for(obj)

    if isinstance(obj, GeneratedFile):
        # These files are already generated by make before tup runs.
        skip_files = (
            'buildid.h',
            'source-repo.h',
        )
        if any(f in skip_files for f in obj.outputs):
            # Let the RecursiveMake backend handle these.
            return False

        if 'application.ini.h' in obj.outputs:
            # application.ini.h is a special case since we need to process
            # the FINAL_TARGET_PP_FILES for application.ini before running
            # the GENERATED_FILES script, and tup doesn't handle the rules
            # out of order.
            backend_file.delayed_generated_files.append(obj)
        else:
            self._process_generated_file(backend_file, obj)
    elif (isinstance(obj, ChromeManifestEntry) and
          obj.install_target.startswith('dist/bin')):
        top_level = mozpath.join(obj.install_target, 'chrome.manifest')
        if obj.path != top_level:
            entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                    obj.install_target)
            self._manifest_entries[top_level].add(entry)
        self._manifest_entries[obj.path].add(str(obj.entry))
    elif isinstance(obj, Defines):
        self._process_defines(backend_file, obj)
    elif isinstance(obj, HostDefines):
        self._process_defines(backend_file, obj, host=True)
    elif isinstance(obj, FinalTargetFiles):
        self._process_final_target_files(obj)
    elif isinstance(obj, FinalTargetPreprocessedFiles):
        self._process_final_target_pp_files(obj, backend_file)
    elif isinstance(obj, JARManifest):
        self._consume_jar_manifest(obj)

    return True

def __init__(self, *args, **kwargs):
    ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)

    # Be helpful to unit tests
    if 'top_srcdir' not in self.substs:
        if os.path.isabs(self.topsrcdir):
            top_srcdir = self.topsrcdir.replace(os.sep, '/')
        else:
            top_srcdir = mozpath.relpath(self.topsrcdir,
                                         self.topobjdir).replace(os.sep, '/')

        d = dict(self.substs)
        d['top_srcdir'] = top_srcdir
        self.substs = ReadOnlyDict(d)

def inner(*paths, **defaults):
    tests = defaultdict(list)
    for path in paths:
        if isinstance(path, tuple):
            path, kwargs = path
        else:
            kwargs = {}

        path = mozpath.normpath(path)
        manifest_name = kwargs.get('flavor', defaults.get('flavor', 'manifest'))
        manifest = kwargs.pop(
            'manifest',
            defaults.pop(
                'manifest',
                mozpath.join(mozpath.dirname(path), manifest_name + '.ini')))

        manifest_abspath = mozpath.join(topsrcdir, manifest)
        relpath = mozpath.relpath(path, mozpath.dirname(manifest))
        test = {
            'name': relpath,
            'path': mozpath.join(topsrcdir, path),
            'relpath': relpath,
            'file_relpath': path,
            'flavor': 'faketest',
            'dir_relpath': mozpath.dirname(path),
            'here': mozpath.dirname(manifest_abspath),
            'manifest': manifest_abspath,
            'manifest_relpath': manifest,
        }
        test.update(**defaults)
        test.update(**kwargs)

        # Normalize paths to ensure that the fixture matches reality.
        for k in [
            'ancestor_manifest',
            'manifest',
            'manifest_relpath',
            'path',
            'relpath',
        ]:
            p = test.get(k)
            if p:
                test[k] = p.replace('/', os.path.sep)

        tests[path].append(test)

    # dump tests to stdout for easier debugging on failure
    print("The 'create_tests' fixture returned:")
    print(json.dumps(dict(tests), indent=2, sort_keys=True))
    return tests

def _make_install_manifest(topsrcdir, topobjdir, test_objs):

    flavor_info = {
        flavor: (root, prefix, install)
        for (flavor, root, prefix, install) in TEST_MANIFESTS.values()
    }

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source,
                                               mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(
            o, root, manifest_dir, out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        if dest in install_info.external_installs:
            continue
        manifest.add_link(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_link(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    return manifest

def context(self, paths, **kwargs):
    if not paths:
        return

    for p in paths:
        if not os.path.exists(p):
            print("error: '{}' is not a valid path.".format(p), file=sys.stderr)
            sys.exit(1)

    paths = [mozpath.relpath(mozpath.join(os.getcwd(), p), build.topsrcdir)
             for p in paths]
    return {
        'env': {
            'MOZHARNESS_TEST_PATHS': json.dumps(resolve_tests_by_suite(paths)),
        }
    }

def relpath(self):
    """Return a path relative to the topsrcdir or topobjdir.

    If the argument is a path to a location in one of the base directories
    (topsrcdir or topobjdir), then strip off the base directory part and
    just return the path within the base directory."""

    abspath = os.path.abspath(os.path.join(self.cwd, self.arg))

    # If that path is within topsrcdir or topobjdir, return an equivalent
    # path relative to that base directory.
    for base_dir in [self.topobjdir, self.topsrcdir]:
        if abspath.startswith(os.path.abspath(base_dir)):
            return mozpath.relpath(abspath, base_dir)

    return mozpath.normsep(self.arg)

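# Hedged sketch, not part of the original source: mirrors the property above
# as a standalone helper with explicit arguments so its behavior can be
# exercised directly. All paths in the example are hypothetical, POSIX-style.
import os
import mozpack.path as mozpath


def resolve_relpath(arg, cwd, topsrcdir, topobjdir):
    # Resolve arg against cwd, then express it relative to the first base
    # directory (objdir before srcdir) that contains it.
    abspath = os.path.abspath(os.path.join(cwd, arg))
    for base_dir in [topobjdir, topsrcdir]:
        if abspath.startswith(os.path.abspath(base_dir)):
            return mozpath.relpath(abspath, base_dir)
    return mozpath.normsep(arg)

# e.g. resolve_relpath('base', '/src/dom', '/src', '/obj') -> 'dom/base'
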
def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir):
    self.topsrcdir = topsrcdir
    self.srcdir = srcdir
    self.objdir = objdir
    self.relobjdir = mozpath.relpath(objdir, topobjdir)
    self.environment = environment
    self.name = mozpath.join(objdir, 'Tupfile')
    self.rules_included = False
    self.shell_exported = False
    self.defines = []
    self.host_defines = []
    self.delayed_generated_files = []

    self.fh = FileAvoidWrite(self.name, capture_diff=True)
    self.fh.write('# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n')
    self.fh.write('\n')

def _build_db_line(self, objdir, reldir, cenv, filename,
                   canonical_suffix, unified=None):
    if canonical_suffix not in self.COMPILERS:
        return
    db = self._db.setdefault(
        (objdir, filename, unified),
        cenv.substs[self.COMPILERS[canonical_suffix]].split() +
        ['-o', '/dev/null', '-c'])
    reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)

    def append_var(name):
        value = cenv.substs.get(name)
        if not value:
            return
        if isinstance(value, types.StringTypes):
            value = value.split()
        db.extend(value)

    if canonical_suffix in ('.mm', '.cpp'):
        db.append('$(STL_FLAGS)')

    db.extend((
        '$(VISIBILITY_FLAGS)',
        '$(DEFINES)',
        '-I%s' % mozpath.join(cenv.topsrcdir, reldir),
        '-I%s' % objdir,
        '$(LOCAL_INCLUDES)',
        '-I%s/dist/include' % cenv.topobjdir,
        '$(EXTRA_INCLUDES)',
    ))
    append_var('DSO_CFLAGS')
    append_var('DSO_PIC_CFLAGS')
    if canonical_suffix in ('.c', '.cpp'):
        db.append('$(RTL_FLAGS)')
    append_var('OS_COMPILE_%s' % self.CFLAGS[canonical_suffix])
    append_var('OS_CPPFLAGS')
    append_var('OS_%s' % self.CFLAGS[canonical_suffix])
    append_var('MOZ_DEBUG_FLAGS')
    append_var('MOZ_OPTIMIZE_FLAGS')
    append_var('MOZ_FRAMEPTR_FLAGS')
    db.append('$(WARNINGS_AS_ERRORS)')
    db.append('$(MOZBUILD_%s)' % self.CFLAGS[canonical_suffix])
    if canonical_suffix == '.m':
        append_var('OS_COMPILE_CMFLAGS')
        db.append('$(MOZBUILD_CMFLAGS)')
    elif canonical_suffix == '.mm':
        append_var('OS_COMPILE_CMMFLAGS')
        db.append('$(MOZBUILD_CMMFLAGS)')

def _find_relevant_mozbuilds(self, paths):
    """Given a set of filesystem paths, find all relevant moz.build files.

    We assume that a moz.build file in the directory ancestry of a given path
    is relevant to that path. Let's say we have the following files on disk::

       moz.build
       foo/moz.build
       foo/baz/moz.build
       foo/baz/file1
       other/moz.build
       other/file2

    If ``foo/baz/file1`` is passed in, the relevant moz.build files are
    ``moz.build``, ``foo/moz.build``, and ``foo/baz/moz.build``. For
    ``other/file2``, the relevant moz.build files are ``moz.build`` and
    ``other/moz.build``.

    Returns a dict of input paths to a list of relevant moz.build files.
    The root moz.build file is first and the leaf-most moz.build is last.
    """
    root = self.config.topsrcdir
    result = {}

    @memoize
    def exists(path):
        return os.path.exists(path)

    def itermozbuild(path):
        subpath = ''
        yield 'moz.build'
        for part in mozpath.split(path):
            subpath = mozpath.join(subpath, part)
            yield mozpath.join(subpath, 'moz.build')

    for path in sorted(paths):
        path = mozpath.normpath(path)
        if os.path.isabs(path):
            if not mozpath.basedir(path, [root]):
                raise Exception('Path outside topsrcdir: %s' % path)
            path = mozpath.relpath(path, root)

        result[path] = [p for p in itermozbuild(path)
                        if exists(mozpath.join(root, p))]

    return result

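# Hedged sketch, not from the original source: reproduces the candidate
# generation done by itermozbuild() above, before the exists() filter is
# applied, so the ancestry walk can be seen in isolation.
import mozpack.path as mozpath


def mozbuild_candidates(path):
    # Yield a moz.build candidate for every ancestor directory of path,
    # from the srcdir root down to the path itself.
    subpath = ''
    candidates = ['moz.build']
    for part in mozpath.split(path):
        subpath = mozpath.join(subpath, part)
        candidates.append(mozpath.join(subpath, 'moz.build'))
    return candidates

# mozbuild_candidates('foo/baz/file1') ->
#   ['moz.build', 'foo/moz.build', 'foo/baz/moz.build', 'foo/baz/file1/moz.build']
# (the last entry is normally discarded by the exists() check)
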
def test_test_defaults_metadata_file_written(self):
    """Ensure test-defaults.pkl is generated."""
    env = self._consume('test-manifests-written', TestManifestBackend)

    test_defaults_path = mozpath.join(env.topobjdir, 'test-defaults.pkl')
    self.assertTrue(os.path.exists(test_defaults_path))

    with open(test_defaults_path, 'rb') as fh:
        o = {mozpath.normpath(k): v
             for k, v in pickle.load(fh).items()}

    self.assertEquals(
        set(mozpath.relpath(k, env.topsrcdir) for k in o.keys()),
        set(['dir1/xpcshell.ini', 'xpcshell.ini', 'mochitest.ini']))

    manifest_path = mozpath.join(env.topsrcdir, 'xpcshell.ini')
    self.assertIn('here', o[manifest_path])
    self.assertIn('support-files', o[manifest_path])

def describe_install_manifest(manifest, dest_dir):
    try:
        manifest = InstallManifest(manifest)
    except UnreadableInstallManifest:
        raise IOError(errno.EINVAL, 'Error parsing manifest file', manifest)

    reg = FileRegistry()

    mapping = {}
    manifest.populate_registry(reg)
    for dest_file, src in reg:
        if hasattr(src, 'path'):
            dest_path = mozpath.join(dest_dir, dest_file)
            relsrc_path = mozpath.relpath(src.path, buildconfig.topsrcdir)
            mapping[dest_path] = relsrc_path

    return mapping

def run_b2g_test(self, b2g_home=None, xre_path=None, **kwargs):
    """Runs a b2g reftest.

    filter is a regular expression (in JS syntax, as could be passed to the
    RegExp constructor) to select which reftests to run from the manifest.

    test_file is a path to a test file. It can be a relative path from the
    top source directory, an absolute filename, or a directory containing
    test files.

    suite is the type of reftest to run. It can be one of ('reftest',
    'crashtest').
    """
    if kwargs["suite"] not in ('reftest', 'crashtest'):
        raise Exception('None or unrecognized reftest suite type.')

    sys.path.insert(0, self.reftest_dir)

    test_subdir = {
        "reftest": os.path.join('layout', 'reftests'),
        "crashtest": os.path.join('layout', 'crashtest'),
    }[kwargs["suite"]]

    # Find the manifest file
    if not kwargs["tests"]:
        if not os.path.exists(os.path.join(self.topsrcdir, test_subdir)):
            test_file = mozpath.relpath(os.path.abspath(test_subdir),
                                        self.topsrcdir)
        kwargs["tests"] = [test_subdir]

    tests = os.path.join(self.reftest_dir, 'tests')
    if not os.path.isdir(tests):
        os.symlink(self.topsrcdir, tests)

    for i, path in enumerate(kwargs["tests"]):
        # Non-absolute paths are relative to the packaged directory, which
        # has an extra tests/ at the start
        if os.path.exists(os.path.abspath(path)):
            path = os.path.relpath(path, os.path.join(self.topsrcdir))
        kwargs["tests"][i] = os.path.join('tests', path)

    if conditions.is_b2g_desktop(self):
        return self.run_b2g_desktop(**kwargs)

    return self.run_b2g_remote(b2g_home, xre_path, **kwargs)

def _get_formatter(self, path, is_resource=None):
    '''
    Return the (sub)formatter corresponding to the given path, its base
    directory and the path relative to that base.
    '''
    base = self._get_base(path)
    use_omnijar = base not in self._addons
    if use_omnijar:
        if is_resource is None:
            is_resource = self.is_resource(path, base)
        use_omnijar = is_resource
    if not use_omnijar:
        return super(OmniJarFormatter, self), '', path
    if base not in self.omnijars:
        omnijar = Jarrer(self._compress, self._optimize)
        self.omnijars[base] = FlatFormatter(omnijar)
        self.copier.add(mozpath.join(base, self._omnijar_name), omnijar)
    return self.omnijars[base], base, mozpath.relpath(path, base)

def test_test_defaults_metadata_file_written(self):
    """Ensure test-defaults.pkl is generated."""
    env = self._consume("test-manifests-written", TestManifestBackend)

    test_defaults_path = mozpath.join(env.topobjdir, "test-defaults.pkl")
    self.assertTrue(os.path.exists(test_defaults_path))

    with open(test_defaults_path, "rb") as fh:
        o = {mozpath.normpath(k): v
             for k, v in pickle.load(fh).items()}

    self.assertEquals(
        set(mozpath.relpath(k, env.topsrcdir) for k in o.keys()),
        set(["dir1/xpcshell.ini", "xpcshell.ini", "mochitest.ini"]),
    )

    manifest_path = mozpath.join(env.topsrcdir, "xpcshell.ini")
    self.assertIn("here", o[manifest_path])
    self.assertIn("support-files", o[manifest_path])

def _get_files_info(command_context, paths, rev=None):
    reader = command_context.mozbuild_reader(config_mode="empty", vcs_revision=rev)

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [command_context.topsrcdir]):
            raise InvalidPathException("path is outside topsrcdir: %s" % p)

        relpaths.append(mozpath.relpath(a, command_context.topsrcdir))

    # Expand wildcards.
    # One variable is for ordering. The other for membership tests.
    # (Membership testing on a list can be slow.)
    allpaths = []
    all_paths_set = set()
    for p in relpaths:
        if "*" not in p:
            if p not in all_paths_set:
                if not os.path.exists(
                        mozpath.join(command_context.topsrcdir, p)):
                    print("(%s does not exist; ignoring)" % p, file=sys.stderr)
                    continue

                all_paths_set.add(p)
                allpaths.append(p)
            continue

        if rev:
            raise InvalidPathException(
                "cannot use wildcard in version control mode")

        # finder is rooted at / for now.
        # TODO bug 1171069 tracks changing to relative.
        search = mozpath.join(command_context.topsrcdir, p)[1:]
        for path, f in reader.finder.find(search):
            path = path[len(command_context.topsrcdir):]
            if path not in all_paths_set:
                all_paths_set.add(path)
                allpaths.append(path)

    return reader.files_info(allpaths)

def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("--base-dir",
                        default=os.path.join(buildconfig.topobjdir, "dist", "bin"),
                        help="Store paths relative to this directory")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+",
                        help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    with errors.accumulate():
        finder = FileFinder(args.base_dir)
        for i in args.input:
            path = mozpath.relpath(i, args.base_dir)
            for p, f in finder.find(path):
                jarrer.add(p, f)
        jarrer.copy(args.zip)

def files_info(self, paths):
    """Obtain aggregate data from Files for a set of files.

    Given a set of input paths, determine which moz.build files may
    define metadata for them, evaluate those moz.build files, and apply
    file metadata rules defined within to determine metadata values for
    each file requested.

    Essentially, for each input path:

    1. Determine the set of moz.build files relevant to that file by
       looking for moz.build files in ancestor directories.
    2. Evaluate moz.build files starting with the most distant.
    3. Iterate over Files sub-contexts.
    4. If the file pattern matches the file we're seeking info on, apply
       attribute updates.
    5. Return the most recent value of attributes.
    """
    paths, _ = self.read_relevant_mozbuilds(paths)

    r = {}

    for path, ctxs in paths.items():
        flags = Files(Context())

        for ctx in ctxs:
            if not isinstance(ctx, Files):
                continue

            relpath = mozpath.relpath(path, ctx.relsrcdir)
            pattern = ctx.pattern

            # Only do wildcard matching if the '*' character is present.
            # Otherwise, mozpath.match will match directories, which we've
            # arbitrarily chosen to not allow.
            if pattern == relpath or \
                    ('*' in pattern and mozpath.match(relpath, pattern)):
                flags += ctx

        r[path] = flags

    return r

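# Hedged usage sketch, not from the original source: files_info() is normally
# reached through a reader object, mirroring the mach helper earlier in this
# collection that calls command_context.mozbuild_reader(...).files_info(...).
# The path argument below is a hypothetical example.
def file_flags(reader, path='dom/base/moz.build'):
    # Returns the aggregated Files() flags object for a single relative path.
    return reader.files_info([path])[path]
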
def process_tests_artifact(self, filename, processed_filename):
    added_entry = False

    with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
        reader = JarReader(filename)
        for filename, entry in reader.entries.iteritems():
            for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                if not mozpath.match(filename, pattern):
                    continue
                destpath = mozpath.relpath(filename, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                mode = entry['external_attr'] >> 16
                writer.add(destpath.encode('utf-8'), reader[filename], mode=mode)
                added_entry = True
                break

            for files_entry in OBJDIR_TEST_FILES.values():
                origin_pattern = files_entry['pattern']
                leaf_filename = filename
                if 'dest' in files_entry:
                    dest = files_entry['dest']
                    origin_pattern = mozpath.join(dest, origin_pattern)
                    leaf_filename = filename[len(dest) + 1:]
                if mozpath.match(filename, origin_pattern):
                    destpath = mozpath.join('..', files_entry['base'],
                                            leaf_filename)
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[filename],
                               mode=mode)

    if not added_entry:
        raise ValueError(
            'Archive format changed! No pattern from "{patterns}" '
            'matched an archive path.'.format(
                patterns=LinuxArtifactJob.test_artifact_patterns))

def process_package_artifact(self, filename, processed_filename):
    added_entry = False

    with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
        for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
            if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                continue

            # strip off the relative "firefox/" bit from the path:
            basename = mozpath.relpath(p, "firefox")
            basename = mozpath.join('bin', basename)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')
            writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
            added_entry = True

    if not added_entry:
        raise ValueError('Archive format changed! No pattern from "{patterns}" '
                         'matched an archive path.'.format(
                             patterns=self.artifact_patterns))

def file_info_test_deps(self, paths, rev=None):
    try:
        for p, m in self._get_files_info(paths, rev=rev).items():
            print('%s:' % mozpath.relpath(p, self.topsrcdir))
            if m.test_files:
                print('\tTest file patterns:')
                for p in m.test_files:
                    print('\t\t%s' % p)
            if m.test_tags:
                print('\tRelevant tags:')
                for p in m.test_tags:
                    print('\t\t%s' % p)
            if m.test_flavors:
                print('\tRelevant flavors:')
                for p in m.test_flavors:
                    print('\t\t%s' % p)
    except InvalidPathException as e:
        print(e.message)
        return 1