def resolve_files(): """Resolve the files that constitute a standalone toolchain. This is a generator of (dest path, file) where the destination path is relative and the file instance is a BaseFile from mozpack. """ vs_path, sdk_path = find_vs_paths() for entry in VS_PATTERNS: finder = FileFinder(vs_path, find_executables=False, ignore=entry.get('ignore', [])) for p, f in finder.find(entry['pattern']): assert p.startswith(('VC/', 'DIA SDK/')) for source, dest in entry.get('rewrite', []): p = p.replace(source, dest) yield p.encode('utf-8'), f for entry in SDK_PATTERNS: finder = FileFinder(sdk_path, find_executables=False, ignore=entry.get('ignore', [])) for p, f in finder.find(entry['pattern']): # We remove the SDK version from the path so we don't have # to update other configs when we change the SDK version. p = p.replace('/%s/' % SDK_RELEASE, '/') relpath = 'SDK/%s' % p yield relpath.encode('utf-8'), f
def resolve_files(): """Resolve the files that constitute a standalone toolchain. This is a generator of (dest path, file) where the destination path is relative and the file instance is a BaseFile from mozpack. """ vs_path, sdk_path = find_vs_paths() for entry in VS_PATTERNS: finder = FileFinder(vs_path, find_executables=False, ignore=entry.get('ignore', [])) for p, f in finder.find(entry['pattern']): assert p.startswith(('VC/', 'DIA SDK/')) yield p.encode('utf-8'), f for entry in SDK_PATTERNS: finder = FileFinder(sdk_path, find_executables=False, ignore=entry.get('ignore', [])) for p, f in finder.find(entry['pattern']): relpath = 'SDK/%s' % p yield relpath.encode('utf-8'), f
def _extract(self, src, dest):
    """extract source distribution into vendor directory

    Every archive found directly under ``src`` is unpacked into ``dest``
    under a directory named after the package with the version stripped,
    replacing any previously vendored copy.  Symlinks inside the
    extracted tree are replaced with regular file copies.
    """
    finder = FileFinder(src)
    for path, _ in finder.find('*'):
        base, ext = os.path.splitext(path)
        if ext == '.whl':
            # Wheels would extract into a directory with the name of the package, but
            # we want the platform signifiers, minus the version number.
            # Wheel filenames look like:
            # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}
            bits = base.split('-')

            # Remove the version number.
            bits.pop(1)
            target = os.path.join(dest, '-'.join(bits))
            mozfile.remove(target)  # remove existing version of vendored package
            os.mkdir(target)
            mozfile.extract(os.path.join(finder.base, path), target)
        else:
            # packages extract into package-version directory name and we strip the version
            tld = mozfile.extract(os.path.join(finder.base, path), dest)[0]
            target = os.path.join(dest, tld.rpartition('-')[0])
            mozfile.remove(target)  # remove existing version of vendored package
            mozfile.move(tld, target)

        # If any files inside the vendored package were symlinks, turn them into normal files
        # because hg.mozilla.org forbids symlinks in the repository.
        link_finder = FileFinder(target)
        for _, f in link_finder.find('**'):
            if os.path.islink(f.path):
                link_target = os.path.realpath(f.path)
                os.unlink(f.path)
                shutil.copyfile(link_target, f.path)
def get_generated_sources():
    '''
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    '''
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, 'generated-sources.json')
    with open(gen_sources, 'rb') as f:
        data = json.load(f)
    for f in data['sources']:
        yield f, mozpath.join(buildconfig.topobjdir, f)

    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = 'ipc/ipdl/_ipdlheaders'
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.h'):
        yield mozpath.join(base, p), f

    # Next, return any Rust source files that were generated into the Rust
    # object directory.  The cargo output directory depends on whether this
    # is a debug or release Rust build.
    rust_build_kind = 'debug' if buildconfig.substs.get('MOZ_DEBUG_RUST') else 'release'
    base = mozpath.join('toolkit/library',
                        buildconfig.substs['RUST_TARGET'],
                        rust_build_kind, 'build')
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.rs'):
        yield mozpath.join(base, p), f
def package_geckolibs_aar(topsrcdir, distdir, output_file):
    """Package the geckolibs Android AAR into ``output_file``.

    Collects the static AAR skeleton (AndroidManifest.xml, classes.jar)
    from the source tree, native libraries, buildinfo JSON and assets
    from ``distdir``, and writes the assembled archive with a Jarrer.
    Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)

    srcdir = os.path.join(topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckolibs')
    jarrer.add('AndroidManifest.xml', File(os.path.join(srcdir, 'AndroidManifest.xml')))
    jarrer.add('classes.jar', File(os.path.join(srcdir, 'classes.jar')))

    jni = FileFinder(os.path.join(distdir, 'fennec', 'lib'))
    for p, f in jni.find('**/*.so'):
        jarrer.add(os.path.join('jni', p), f)

    # Include the buildinfo JSON as an asset, to give future consumers at least
    # a hope of determining where this AAR came from.
    # Note: renamed from `json` to avoid shadowing the `json` module.
    json_finder = FileFinder(distdir, ignore=['*.mozinfo.json'])
    for p, f in json_finder.find('*.json'):
        jarrer.add(os.path.join('assets', p), f)

    # This neatly ignores omni.ja.
    assets = FileFinder(os.path.join(distdir, 'fennec', 'assets'))
    for p, f in assets.find('**/*.so'):
        jarrer.add(os.path.join('assets', p), f)

    jarrer.copy(output_file)
    return 0
def get_generated_sources():
    '''
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    '''
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, 'generated-sources.json')
    with open(gen_sources, 'rb') as f:
        data = json.load(f)
    for f in data['sources']:
        yield f, mozpath.join(buildconfig.topobjdir, f)

    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = 'ipc/ipdl/_ipdlheaders'
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.h'):
        yield mozpath.join(base, p), f

    # Next, return any Rust source files that were generated into the Rust
    # object directory.  The cargo output directory depends on whether this
    # is a debug or release Rust build.
    rust_build_kind = 'debug' if buildconfig.substs.get(
        'MOZ_DEBUG_RUST') else 'release'
    base = mozpath.join(buildconfig.substs['RUST_TARGET'],
                        rust_build_kind, 'build')
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.rs'):
        yield mozpath.join(base, p), f
def _extract(self, src, dest, keep_extra_files=False):
    """extract source distribution into vendor directory

    Every archive found directly under ``src`` is unpacked into ``dest``
    under a directory named after the package with the version stripped,
    replacing any previously vendored copy.  Unless ``keep_extra_files``
    is true, doc/test directories are skipped during extraction.
    Symlinks inside the extracted tree are replaced with file copies.
    """
    ignore = ()
    if not keep_extra_files:
        ignore = (
            "*/doc",
            "*/docs",
            "*/test",
            "*/tests",
        )
    finder = FileFinder(src)
    for path, _ in finder.find("*"):
        # packages extract into package-version directory name and we strip the version
        tld = mozfile.extract(os.path.join(finder.base, path), dest, ignore=ignore)[0]
        target = os.path.join(dest, tld.rpartition("-")[0])
        mozfile.remove(target)  # remove existing version of vendored package
        mozfile.move(tld, target)

        # If any files inside the vendored package were symlinks, turn them into normal files
        # because hg.mozilla.org forbids symlinks in the repository.
        link_finder = FileFinder(target)
        for _, f in link_finder.find("**"):
            if os.path.islink(f.path):
                link_target = os.path.realpath(f.path)
                os.unlink(f.path)
                shutil.copyfile(link_target, f.path)
def _parse_android_test_results(config, topsrcdir=None, report_dir=None):
    """Yield lint results for failures found in JUnit XML reports.

    Reads every ``TEST-*.xml`` under ``report_dir``, maps each failing
    test class back to its source file under mobile/android, and yields
    one error result per failure with the source line extracted from the
    failure message.  Raises RuntimeError if no reports, no source file,
    or no source line can be found.
    """
    # A brute force way to turn a Java FQN into a path on disk.  Assumes Java
    # and Kotlin sources are in mobile/android for performance and simplicity.
    sourcepath_finder = FileFinder(os.path.join(topsrcdir, "mobile", "android"))

    finder = FileFinder(report_dir)
    reports = list(finder.find("TEST-*.xml"))
    if not reports:
        raise RuntimeError("No reports found under {}".format(report_dir))

    for report, _ in reports:
        tree = ET.parse(open(os.path.join(finder.base, report), "rt"))
        root = tree.getroot()

        class_name = root.get(
            "name")  # Like 'org.mozilla.gecko.permissions.TestPermissions'.
        path = (
            "**/" + class_name.replace(".", "/") + ".*"
        )  # Like '**/org/mozilla/gecko/permissions/TestPermissions.*'.  # NOQA: E501

        for testcase in root.findall("testcase"):
            function_name = testcase.get("name")

            # Schema cribbed from http://llg.cubic.org/docs/junit/.
            for unexpected in itertools.chain(testcase.findall("error"),
                                              testcase.findall("failure")):
                sourcepaths = list(sourcepath_finder.find(path))
                if not sourcepaths:
                    raise RuntimeError(
                        "No sourcepath found for class {class_name}".format(
                            class_name=class_name))

                for sourcepath, _ in sourcepaths:
                    lineno = 0
                    message = unexpected.get("message")
                    # Turn '... at org.mozilla.gecko.permissions.TestPermissions.testMultipleRequestsAreQueuedAndDispatchedSequentially(TestPermissions.java:118)' into 118.  # NOQA: E501
                    pattern = r"at {class_name}\.{function_name}\(.*:(\d+)\)"
                    pattern = pattern.format(class_name=class_name,
                                             function_name=function_name)
                    match = re.search(pattern, message)
                    if match:
                        lineno = int(match.group(1))
                    else:
                        msg = "No source line found for {class_name}.{function_name}".format(
                            class_name=class_name, function_name=function_name)
                        raise RuntimeError(msg)

                    err = {
                        "level": "error",
                        "rule": unexpected.get("type"),
                        "message": message,
                        "path": os.path.join("mobile", "android", sourcepath),
                        "lineno": lineno,
                    }
                    yield result.from_config(config, **err)
def _parse_android_test_results(config, topsrcdir=None, report_dir=None):
    """Yield lint results for failures found in JUnit XML reports.

    Reads every ``TEST-*.xml`` under ``report_dir``, maps each failing
    test class back to its source file under mobile/android, and yields
    one error result per failure with the source line extracted from the
    failure message.  Raises RuntimeError if no reports, no source file,
    or no source line can be found.
    """
    # A brute force way to turn a Java FQN into a path on disk.  Assumes Java
    # and Kotlin sources are in mobile/android for performance and simplicity.
    sourcepath_finder = FileFinder(os.path.join(topsrcdir, 'mobile', 'android'))

    finder = FileFinder(report_dir)
    reports = list(finder.find('TEST-*.xml'))
    if not reports:
        raise RuntimeError('No reports found under {}'.format(report_dir))

    for report, _ in reports:
        tree = ET.parse(open(os.path.join(finder.base, report), 'rt'))
        root = tree.getroot()

        class_name = root.get(
            'name')  # Like 'org.mozilla.gecko.permissions.TestPermissions'.
        path = '**/' + class_name.replace(
            '.', '/'
        ) + '.*'  # Like '**/org/mozilla/gecko/permissions/TestPermissions.*'.  # NOQA: E501

        for testcase in root.findall('testcase'):
            function_name = testcase.get('name')

            # Schema cribbed from http://llg.cubic.org/docs/junit/.
            for unexpected in itertools.chain(testcase.findall('error'),
                                              testcase.findall('failure')):
                sourcepaths = list(sourcepath_finder.find(path))
                if not sourcepaths:
                    raise RuntimeError(
                        'No sourcepath found for class {class_name}'.format(
                            class_name=class_name))

                for sourcepath, _ in sourcepaths:
                    lineno = 0
                    message = unexpected.get('message')
                    # Turn '... at org.mozilla.gecko.permissions.TestPermissions.testMultipleRequestsAreQueuedAndDispatchedSequentially(TestPermissions.java:118)' into 118.  # NOQA: E501
                    pattern = r'at {class_name}\.{function_name}\(.*:(\d+)\)'
                    pattern = pattern.format(class_name=class_name,
                                             function_name=function_name)
                    match = re.search(pattern, message)
                    if match:
                        lineno = int(match.group(1))
                    else:
                        msg = 'No source line found for {class_name}.{function_name}'.format(
                            class_name=class_name, function_name=function_name)
                        raise RuntimeError(msg)

                    err = {
                        'level': 'error',
                        'rule': unexpected.get('type'),
                        'message': message,
                        'path': os.path.join('mobile', 'android', sourcepath),
                        'lineno': lineno,
                    }
                    yield result.from_config(config, **err)
def explode(aar, destdir):
    """Extract AAR archive ``aar`` under ``destdir``/<aar basename>.

    Renames the embedded classes.jar and any libs/*.jar to unique,
    basename-prefixed names, and removes an empty assets/ directory
    if present.
    """
    # Take just the support-v4-22.2.1 part.
    name, _ = os.path.splitext(os.path.basename(aar))
    destdir = mozpath.join(destdir, name)
    if os.path.exists(destdir):
        # We always want to start fresh.
        shutil.rmtree(destdir)
    ensureParentDir(destdir)
    with zipfile.ZipFile(aar) as zf:
        zf.extractall(destdir)

    # classes.jar is always present. However, multiple JAR files with the same
    # name confuses our staged Proguard process in
    # mobile/android/base/Makefile.in, so we make the names unique here.
    classes_jar = mozpath.join(destdir, name + '-classes.jar')
    os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)

    # Embedded JAR libraries are optional.
    finder = FileFinder(mozpath.join(destdir, 'libs'))
    for p, _ in finder.find('*.jar'):
        jar = mozpath.join(finder.base, name + '-' + p)
        os.rename(mozpath.join(finder.base, p), jar)

    # Frequently assets/ is present but empty.  Protect against meaningless
    # changes to the AAR files by deleting empty assets/ directories.
    assets = mozpath.join(destdir, 'assets')
    try:
        os.rmdir(assets)
    except OSError as e:
        # `except OSError, e` was Python 2-only syntax; `as` is valid on
        # Python 2.6+ and required on Python 3.
        if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
            pass
        else:
            raise
def find_files(archive):
    """Yield (dest-relative path, BaseFile) pairs for ``archive``.

    Each ARCHIVE_FILES entry describes a source directory, one or more
    glob patterns, optional ignores, and an optional destination prefix.
    """
    for entry in ARCHIVE_FILES[archive]:
        source = entry['source']
        base = entry.get('base', '')

        # Copy the patterns list: the original code appended the single
        # 'pattern' into the list stored inside ARCHIVE_FILES, so calling
        # this function repeatedly accumulated duplicate patterns.
        patterns = list(entry.get('patterns', []))
        pattern = entry.get('pattern')
        if pattern:
            patterns.append(pattern)

        dest = entry.get('dest')

        # Always prune byte-compiled files and mkdir sentinels.
        ignore = list(entry.get('ignore', []))
        ignore.append('**/.mkdir.done')
        ignore.append('**/*.pyc')

        common_kwargs = {
            'find_executables': False,
            'find_dotfiles': True,
            'ignore': ignore,
        }

        finder = FileFinder(os.path.join(source, base), **common_kwargs)
        for pattern in patterns:
            for p, f in finder.find(pattern):
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
def all_mozbuild_paths(self):
    """Iterator over all available moz.build files.

    This method has little to do with the reader. It should arguably belong
    elsewhere.
    """
    # In the future, we may traverse moz.build files by looking
    # for DIRS references in the AST, even if a directory is added behind
    # a conditional. For now, just walk the filesystem.
    ignore = {
        # Ignore fake moz.build files used for testing moz.build.
        'python/mozbuild/mozbuild/test',
        # Ignore object directories.
        'obj*',
    }

    finder = FileFinder(self.config.topsrcdir,
                        find_executables=False,
                        ignore=ignore)

    # The root doesn't get picked up by FileFinder.
    yield 'moz.build'

    for relpath, _ in finder.find('**/moz.build'):
        yield relpath
def _get_files_info(self, paths):
    """Return Files metadata for the given paths (wildcards allowed)."""
    from mozpack.files import FileFinder

    # Normalize to relative from topsrcdir, rejecting anything outside it.
    relpaths = []
    for candidate in paths:
        absolute = mozpath.abspath(candidate)
        if not mozpath.basedir(absolute, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % candidate)
        relpaths.append(mozpath.relpath(absolute, self.topsrcdir))

    finder = FileFinder(self.topsrcdir, find_executables=False)

    # Expand wildcards, preserving first-seen order without duplicates.
    allpaths = []
    seen = set()
    for relpath in relpaths:
        if '*' not in relpath:
            if relpath not in seen:
                seen.add(relpath)
                allpaths.append(relpath)
            continue

        for found, _ in finder.find(relpath):
            if found not in seen:
                seen.add(found)
                allpaths.append(found)

    reader = self._get_reader()
    return reader.files_info(allpaths)
def explode(aar, destdir):
    """Extract AAR archive ``aar`` under ``destdir``/<aar basename>.

    Renames the embedded classes.jar and any libs/*.jar to unique,
    basename-prefixed names, and removes an empty assets/ directory
    if present.
    """
    # Take just the support-v4-22.2.1 part.
    name, _ = os.path.splitext(os.path.basename(aar))
    destdir = mozpath.join(destdir, name)
    if os.path.exists(destdir):
        # We always want to start fresh.
        shutil.rmtree(destdir)
    ensureParentDir(destdir)
    with zipfile.ZipFile(aar) as zf:
        zf.extractall(destdir)

    # classes.jar is always present. However, multiple JAR files with the same
    # name confuses our staged Proguard process in
    # mobile/android/base/Makefile.in, so we make the names unique here.
    classes_jar = mozpath.join(destdir, name + '-classes.jar')
    os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)

    # Embedded JAR libraries are optional.
    finder = FileFinder(mozpath.join(destdir, 'libs'), find_executables=False)
    for p, _ in finder.find('*.jar'):
        jar = mozpath.join(finder.base, name + '-' + p)
        os.rename(mozpath.join(finder.base, p), jar)

    # Frequently assets/ is present but empty.  Protect against meaningless
    # changes to the AAR files by deleting empty assets/ directories.
    assets = mozpath.join(destdir, 'assets')
    try:
        os.rmdir(assets)
    except OSError as e:
        # `except OSError, e` was Python 2-only syntax; `as` is valid on
        # Python 2.6+ and required on Python 3.
        if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
            pass
        else:
            raise
def get_generated_sources():
    """
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    """
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, "generated-sources.json")
    with open(gen_sources, "r") as f:
        data = json.load(f)
    for f in data["sources"]:
        # Exclude symverscript
        if mozpath.basename(f) != "symverscript":
            yield f, mozpath.join(buildconfig.topobjdir, f)

    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = "ipc/ipdl/_ipdlheaders"
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find("**/*.h"):
        yield mozpath.join(base, p), f

    # Next, return any source files that were generated into the Rust
    # object directory.  The cargo output directory depends on whether
    # this is a debug or release Rust build.
    rust_build_kind = "debug" if buildconfig.substs.get(
        "MOZ_DEBUG_RUST") else "release"
    base = mozpath.join(buildconfig.substs["RUST_TARGET"],
                        rust_build_kind, "build")
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder:
        if p.endswith((".rs", ".c", ".h", ".cc", ".cpp")):
            yield mozpath.join(base, p), f
def update_uuids(self, path, interfaces):
    """Update the UUIDs of the named XPIDL ``interfaces``.

    Parses every .idl file under ``path`` to build an interface
    registry, then applies an IDLUpdater to the requested interfaces.
    Files that fail to parse are silently skipped.
    """
    import os
    import xpidl
    from mozpack.files import FileFinder
    import mozpack.path
    from tempfile import mkdtemp

    finder = FileFinder(path, find_executables=False)
    # Avoid creating xpidllex and xpidlyacc in the current directory.
    tmpdir = mkdtemp()
    try:
        parser = xpidl.IDLParser(outputdir=tmpdir)

        registry = InterfaceRegistry()
        for p, f in finder.find('**/*.idl'):
            p = mozpack.path.join(path, p)

            try:
                content = f.open().read()
                idl = parser.parse(content, filename=p)
            except Exception:
                # Best-effort: unparseable IDL files are skipped rather
                # than aborting the whole update.
                continue

            for prod in idl.productions:
                if isinstance(prod, xpidl.Interface):
                    registry.add(Interface(p, prod))
    finally:
        import shutil
        shutil.rmtree(tmpdir)

    updates = IDLUpdater(registry)

    for interface in interfaces:
        updates.add(interface)

    updates.update()
def find_shell_scripts(config, paths):
    """Map each shell script found under ``paths`` to its shell name."""
    root = config["root"]
    excluded = [mozpath.join(root, entry) for entry in config.get("exclude", [])]

    extensions = config.get("extensions")
    pattern = "**/*.{}".format(extensions[0]) if extensions else "**/*.sh"

    candidates = []
    for search_path in paths:
        search_path = mozpath.normsep(search_path)
        # Re-root each exclusion entry relative to this search path.
        ignore = [
            entry[len(search_path):].lstrip("/")
            for entry in excluded
            if mozpath.commonprefix((search_path, entry)) == search_path
        ]
        finder = FileFinder(search_path, ignore=ignore)
        candidates.extend(
            os.path.join(search_path, rel) for rel, _ in finder.find(pattern))

    found = {}
    for filename in candidates:
        shell = determine_shell_from_script(filename)
        if shell:
            found[filename] = shell
    return found
def _handle_manifest_entry(self, entry, jars):
    """Process a chrome manifest entry, unpacking any jar: references.

    Returns the (possibly rewritten) entry.  ``jars`` is the set of jar
    paths already unpacked; newly unpacked jars are added to it.
    """
    jarpath = None
    if isinstance(entry, ManifestEntryWithRelPath) and \
            urlparse(entry.relpath).scheme == 'jar':
        jarpath, entry = self._unjarize(entry, entry.relpath)
    elif isinstance(entry, ManifestResource) and \
            urlparse(entry.target).scheme == 'jar':
        jarpath, entry = self._unjarize(entry, entry.target)
    if jarpath:
        # Don't defer unpacking the jar file. If we already saw
        # it, take (and remove) it from the registry. If we
        # haven't, try to find it now.
        if self.files.contains(jarpath):
            jar = self.files[jarpath]
            self.files.remove(jarpath)
        else:
            # Unbound call: presumably bypasses this class's own find()
            # override to use the base FileFinder lookup — confirm.
            jar = [f for p, f in FileFinder.find(self, jarpath)]
            assert len(jar) == 1
            jar = jar[0]
        if not jarpath in jars:
            base = mozpack.path.splitext(jarpath)[0]
            for j in self._open_jar(jarpath, jar):
                self.files.add(mozpack.path.join(base, j.filename),
                               DeflatedFile(j))
            jars.add(jarpath)
        self.kind = 'jar'
    return entry
def hash_paths(base_path, patterns):
    """
    Give a list of path patterns, return a digest of the contents of all
    the corresponding files, similarly to git tree objects or mercurial
    manifests.

    Each file is hashed. The list of all hashes and file paths is then
    itself hashed to produce the result.
    """
    finder = FileFinder(base_path)
    digest = hashlib.sha256()

    files = {}
    for pattern in patterns:
        matches = list(finder.find(pattern))
        if not matches:
            raise Exception('%s did not match anything' % pattern)
        files.update(matches)

    for path in sorted(files.keys()):
        # Byte-compiled artifacts are not deterministic; skip them.
        if path.endswith(('.pyc', '.pyd', '.pyo')):
            continue
        entry = '{} {}\n'.format(
            hash_path(mozpath.abspath(mozpath.join(base_path, path))),
            mozpath.normsep(path))
        digest.update(six.ensure_binary(entry))

    return digest.hexdigest()
def run_linter(python, paths, config, **lintargs):
    """Run the py-compat check over ``paths`` with the given ``python``.

    Collects candidate .py files, writes them to a temp file and feeds
    that to check_compat.py via a PyCompatProcess.
    """
    binary = find_executable(python)
    if not binary:
        # TODO bootstrap python3 if not available
        print('error: {} not detected, aborting py-compat check'.format(python))
        if 'MOZ_AUTOMATION' in os.environ:
            return 1
        return []

    pattern = "**/*.py"
    exclude = lintargs.get('exclude', [])
    files = []
    for path in paths:
        # Files are taken as-is; directories are walked for the pattern.
        if os.path.isfile(path):
            files.append(path)
            continue

        finder = FileFinder(path, ignore=exclude)
        files.extend([os.path.join(path, p) for p, f in finder.find(pattern)])

    with tempfile.NamedTemporaryFile(mode='w') as fh:
        fh.write('\n'.join(files))
        fh.flush()

        cmd = [binary, os.path.join(here, 'check_compat.py'), fh.name]

        proc = PyCompatProcess(config, cmd)
        proc.run()
        try:
            proc.wait()
        except KeyboardInterrupt:
            proc.kill()

    # NOTE(review): `results` is not defined in this function — presumably
    # a module-level list populated by PyCompatProcess callbacks; confirm.
    return results
def walk_topsrcdir(self): """Read all moz.build files in the source tree. This is different from read_topsrcdir() in that this version performs a filesystem walk to discover every moz.build file rather than relying on data from executed moz.build files to drive traversal. This is a generator of Sandbox instances. """ # In the future, we may traverse moz.build files by looking # for DIRS references in the AST, even if a directory is added behind # a conditional. For now, just walk the filesystem. ignore = { # Ignore fake moz.build files used for testing moz.build. 'python/mozbuild/mozbuild/test', # Ignore object directories. 'obj*', } finder = FileFinder(self.topsrcdir, find_executables=False, ignore=ignore) for path, f in finder.find('**/moz.build'): path = os.path.join(self.topsrcdir, path) for s in self.read_mozbuild(path, self.config, descend=False, filesystem_absolute=True, read_tiers=True): yield s
def walk_topsrcdir(self): """Read all moz.build files in the source tree. This is different from read_topsrcdir() in that this version performs a filesystem walk to discover every moz.build file rather than relying on data from executed moz.build files to drive traversal. This is a generator of Sandbox instances. """ # In the future, we may traverse moz.build files by looking # for DIRS references in the AST, even if a directory is added behind # a conditional. For now, just walk the filesystem. ignore = { # Ignore fake moz.build files used for testing moz.build. 'python/mozbuild/mozbuild/test', # Ignore object directories. 'obj*', } finder = FileFinder(self.topsrcdir, find_executables=False, ignore=ignore) for path, f in finder.find('**/moz.build'): path = os.path.join(self.topsrcdir, path) for s in self.read_mozbuild(path, descend=False, filesystem_absolute=True, read_tiers=True): yield s
def package_geckoview_aar(topsrcdir, distdir, output_file):
    """Package the GeckoView AAR into ``output_file``.

    Assembles omni.ja, Fennec resources, a merged classes.jar, R.txt and
    AndroidManifest.xml into a single archive.  Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)
    fennec_path = os.path.join(distdir, 'fennec')
    assets = FileFinder(os.path.join(fennec_path, 'assets'), ignore=['*.so'])
    for p, f in assets.find('omni.ja'):
        jarrer.add(os.path.join('assets', p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, '..', 'mobile', 'android', 'base')

    # The resource set is packaged during Fennec's build.
    resjar = JarReader(os.path.join(base_path, 'geckoview_resources.zip'))
    # Bug fix: the original passed the stale loop variable `p` as the
    # JarFinder base instead of `base_path`.
    for p, f in JarFinder(base_path, resjar).find('*'):
        jarrer.add(os.path.join('res', p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add('classes.jar', classes_jar_file)

    # Add R.txt.
    jarrer.add('R.txt', File(os.path.join(base_path, 'R.txt')))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckoview')
    jarrer.add('AndroidManifest.xml', File(os.path.join(srcdir, 'AndroidManifest.xml')))

    jarrer.copy(output_file)
    return 0
def package_geckoview_aar(topsrcdir, distdir, appname, output_file):
    """Package the GeckoView AAR into ``output_file``.

    Assembles omni.ja (from the ``appname`` dist dir), Fennec resources,
    a merged classes.jar, R.txt and AndroidManifest.xml into a single
    archive.  Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)
    app_path = os.path.join(distdir, appname)
    assets = FileFinder(os.path.join(app_path, 'assets'), ignore=['*.so'])
    for p, f in assets.find('omni.ja'):
        jarrer.add(os.path.join('assets', p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, '..', 'mobile', 'android', 'base')

    # The resource set is packaged during Fennec's build.
    resjar = JarReader(os.path.join(base_path, 'geckoview_resources.zip'))
    for p, f in JarFinder(base_path, resjar).find('*'):
        jarrer.add(os.path.join('res', p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add('classes.jar', classes_jar_file)

    # Add R.txt.
    jarrer.add('R.txt', File(os.path.join(base_path, 'R.txt')))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckoview')
    jarrer.add('AndroidManifest.xml', File(os.path.join(srcdir, 'AndroidManifest.xml')))

    jarrer.copy(output_file)
    return 0
def main(args):
    """Zip the given input paths into a jar, honoring exclude patterns."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-C", metavar='DIR', default=".",
                        help="Change to given directory before considering "
                        "other paths")
    parser.add_argument("--strip", action='store_true',
                        help="Strip executables")
    parser.add_argument("-x", metavar='EXCLUDE', default=[], action='append',
                        help="Exclude files that match the pattern")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+",
                        help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer()

    with errors.accumulate():
        finder = FileFinder(args.C, find_executables=args.strip)
        for input_path in args.input:
            for relpath, fileobj in finder.find(input_path):
                # Skip anything matching an exclude pattern.
                excluded = any(match(relpath, pat) for pat in args.x)
                if not excluded:
                    jarrer.add(relpath, fileobj)
        jarrer.copy(mozpath.join(args.C, args.zip))
def add_names(names, defaults=None):
    """Append a suggested-site dict to ``sites`` for each name in ``names``.

    ``defaults`` provides base keys for every site; each site is filled
    from the properties file and given an image URL.  Raises if a
    required drawable is missing from the resources directory.
    """
    # Avoid a mutable default argument; behavior is unchanged since the
    # default was only ever read (deep-copied), never mutated.
    if defaults is None:
        defaults = {}
    for name in names:
        site = copy.deepcopy(defaults)
        site.update(
            properties.get_dict(
                'browser.suggestedsites.{name}'.format(name=name),
                required_keys=('title', 'url', 'bgcolor')))
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable.  If
        # none exists, throw.  This stops a locale discovering, at runtime,
        # that the corresponding drawable was not added to en-US.
        if not opts.resources:
            continue
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)
        matches = [
            p for p, _ in finder.find(drawables_template.format(name=name))
        ]
        if not matches:
            raise Exception(
                "Could not find drawable in '{resources}' for '{name}'".
                format(resources=resources, name=name))
        else:
            if opts.verbose:
                print(
                    "Found {len} drawables in '{resources}' for '{name}': {matches}"
                    .format(len=len(matches), resources=resources,
                            name=name, matches=matches))
def package_geckoview_aar(topsrcdir, distdir, appname, output_file):
    """Package the GeckoView AAR into ``output_file``.

    Assembles omni.ja (from the ``appname`` dist dir), Fennec resources,
    a merged classes.jar, R.txt and AndroidManifest.xml into a single
    archive.  Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)
    app_path = os.path.join(distdir, appname)
    assets = FileFinder(os.path.join(app_path, "assets"), ignore=["*.so"])
    for p, f in assets.find("omni.ja"):
        jarrer.add(os.path.join("assets", p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, "..", "mobile", "android", "base")

    # The resource set is packaged during Fennec's build.
    resjar = JarReader(os.path.join(base_path, "geckoview_resources.zip"))
    for p, f in JarFinder(base_path, resjar).find("*"):
        jarrer.add(os.path.join("res", p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add("classes.jar", classes_jar_file)

    # Add R.txt.
    jarrer.add("R.txt", File(os.path.join(base_path, "R.txt")))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(topsrcdir, "mobile", "android", "geckoview_library", "geckoview")
    jarrer.add("AndroidManifest.xml",
               File(os.path.join(srcdir, "AndroidManifest.xml")))

    jarrer.copy(output_file)
    return 0
def __init__(
    self,
    config,
    gyp_dir_attrs,
    path,
    output,
    executor,
    action_overrides,
    non_unified_sources,
):
    """Kick off an asynchronous load of the gyp file at ``path``.

    The actual gyp load runs on ``executor``; the future is stored in
    ``self._gyp_loader_future`` for later consumption.
    """
    self._path = path
    self._config = config
    self._output = output
    self._non_unified_sources = non_unified_sources
    self._gyp_dir_attrs = gyp_dir_attrs
    self._action_overrides = action_overrides
    self.execution_time = 0.0
    self._results = []

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    if config.substs["CC_TYPE"] == "clang-cl":
        # This isn't actually used anywhere in this generator, but it's needed
        # to override the registry detection of VC++ in gyp.
        os.environ.update(
            ensure_subprocess_env({
                "GYP_MSVS_OVERRIDE_PATH": "fake_path",
                "GYP_MSVS_VERSION": config.substs["MSVS_VERSION"],
            }))

    params = {
        "parallel": False,
        "generator_flags": {},
        "build_files": [path],
        "root_targets": None,
    }

    if gyp_dir_attrs.no_chromium:
        includes = []
        depth = mozpath.dirname(path)
    else:
        depth = chrome_src
        # Files that gyp_chromium always includes
        includes = [
            mozpath.join(script_dir, "gyp_includes", "common.gypi")
        ]
        finder = FileFinder(chrome_src)
        includes.extend(
            mozpath.join(chrome_src, name)
            for name, _ in finder.find("*/supplement.gypi"))

    str_vars = dict(gyp_dir_attrs.variables)
    str_vars["python"] = sys.executable
    self._gyp_loader_future = executor.submit(load_gyp, [path], "mozbuild",
                                              str_vars, includes, depth,
                                              params)
def distribution_files(root):
    """Find all files suitable for distributing.

    Given the path to generated Sphinx documentation, returns an iterable
    of (path, BaseFile) for files that should be archived, uploaded, etc.
    Paths are relative to given root directory.
    """
    # _staging and _venv are build-time artifacts, not documentation.
    return FileFinder(root, ignore=('_staging', '_venv')).find('**')
def _denormalize_symlinks(target):
    """Replace symlinks under ``target`` with copies of their targets."""
    # If any files inside the vendored package were symlinks, turn them into
    # normal files because hg.mozilla.org forbids symlinks in the repository.
    for _, entry in FileFinder(target).find("**"):
        if not os.path.islink(entry.path):
            continue
        real_path = os.path.realpath(entry.path)
        os.unlink(entry.path)
        shutil.copyfile(real_path, entry.path)
def distribution_files(root):
    """Find all files suitable for distributing.

    Given the path to generated Sphinx documentation, returns an iterable
    of (path, BaseFile) for files that should be archived, uploaded, etc.
    Paths are relative to given root directory.
    """
    # _staging and _venv are build-time artifacts, not documentation.
    return FileFinder(root, ignore=("_staging", "_venv")).find("**")
def package_gcno_tree(root, output_file):
    """Package every .gcno file under ``root`` into a jar at ``output_file``."""
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    jarrer = Jarrer(optimize=False)
    for relpath, fileobj in FileFinder(root).find("**/*.gcno"):
        jarrer.add(relpath, fileobj)
    jarrer.copy(output_file)
def _get_files_info(self, paths, rev=None):
    """Return Files metadata for ``paths``, optionally at revision ``rev``.

    Wildcards are expanded against the filesystem and are rejected in
    version-control (``rev``) mode.
    """
    from mozbuild.frontend.reader import default_finder
    from mozpack.files import FileFinder, MercurialRevisionFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    repo = None
    if rev:
        hg_path = os.path.join(self.topsrcdir, '.hg')
        if not os.path.exists(hg_path):
            raise InvalidPathException('a Mercurial repo is required '
                                       'when specifying a revision')

        repo = self.topsrcdir

    # We need two finders because the reader's finder operates on
    # absolute paths.
    finder = FileFinder(self.topsrcdir)
    if repo:
        reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                recognize_repo_paths=True)
    else:
        reader_finder = default_finder

    # Expand wildcards.
    # One variable is for ordering. The other for membership tests.
    # (Membership testing on a list can be slow.)
    allpaths = []
    all_paths_set = set()
    for p in relpaths:
        if '*' not in p:
            if p not in all_paths_set:
                all_paths_set.add(p)
                allpaths.append(p)
            continue

        if repo:
            raise InvalidPathException(
                'cannot use wildcard in version control mode')

        for path, f in finder.find(p):
            if path not in all_paths_set:
                all_paths_set.add(path)
                allpaths.append(path)

    reader = self._get_reader(finder=reader_finder)
    return reader.files_info(allpaths)
def find_sdk_tool(binary, log=None):
    """Locate a Windows SDK tool named ``binary``.

    Checks, in order: an environment variable named after the tool, the
    PATH (plus the PowerShell directory), and the Windows SDK ``bin``
    tree.  Returns a normalized path or None if not found.
    """
    # Normalize "tool.exe" to "tool" so env-var and path lookups agree.
    if binary.lower().endswith(".exe"):
        binary = binary[:-4]

    maybe = os.environ.get(binary.upper())
    if maybe:
        log(
            logging.DEBUG,
            "msix",
            {"binary": binary, "path": maybe},
            "Found {binary} in environment: {path}",
        )
        return mozpath.normsep(maybe)

    maybe = which(
        binary, extra_search_dirs=["c:/Windows/System32/WindowsPowershell/v1.0"]
    )
    if maybe:
        log(
            logging.DEBUG,
            "msix",
            {"binary": binary, "path": maybe},
            "Found {binary} on path: {path}",
        )
        return mozpath.normsep(maybe)

    sdk = os.environ.get("WINDOWSSDKDIR") or "C:/Program Files (x86)/Windows Kits/10"
    log(
        logging.DEBUG,
        "msix",
        {"binary": binary, "sdk": sdk},
        "Looking for {binary} in Windows SDK: {sdk}",
    )

    if sdk:
        # Like `bin/VERSION/ARCH/tool.exe`.
        finder = FileFinder(sdk)

        # TODO: handle running on ARM.
        is_64bits = sys.maxsize > 2 ** 32
        arch = "x64" if is_64bits else "x86"

        for p, f in finder.find(
            "bin/**/{arch}/{binary}.exe".format(arch=arch, binary=binary)
        ):
            maybe = mozpath.normsep(mozpath.join(sdk, p))
            log(
                logging.DEBUG,
                "msix",
                {"binary": binary, "path": maybe},
                "Found {binary} in Windows SDK: {path}",
            )
            return maybe

    return None
def global_payload(config, **lintargs):
    """Global linter that delegates to the external linter for the actual work.

    Lints every file under <root>/files and re-attributes the resulting
    issues to this linter.
    """
    root = lintargs["root"]
    finder = FileFinder(root)
    paths = [mozpath.join(root, relpath) for relpath, _ in finder.find("files/**")]

    results = external(paths, config, **lintargs)
    # Make each issue look like it comes from this linter.
    for result in results:
        result.linter = "global_payload"
    return results
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination path
    is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    # Visual Studio files keep their original relative paths.
    for vs_entry in VS_PATTERNS:
        vs_finder = FileFinder(vs_path, ignore=vs_entry.get('ignore', []))
        for relpath, fileobj in vs_finder.find(vs_entry['pattern']):
            assert relpath.startswith(('VC/', 'DIA SDK/'))
            yield relpath.encode('utf-8'), fileobj

    # SDK files are placed under an SDK/ prefix.
    for sdk_entry in SDK_PATTERNS:
        sdk_finder = FileFinder(sdk_path, ignore=sdk_entry.get('ignore', []))
        for relpath, fileobj in sdk_finder.find(sdk_entry['pattern']):
            yield ('SDK/%s' % relpath).encode('utf-8'), fileobj
def _get_files_info(self, paths, rev=None):
    """Return moz.build files_info for the given paths (optionally at a revision)."""
    from mozbuild.frontend.reader import default_finder
    from mozpack.files import FileFinder, MercurialRevisionFinder

    # Convert every input path to one relative to topsrcdir.
    relpaths = []
    for path in paths:
        absolute = mozpath.abspath(path)
        if not mozpath.basedir(absolute, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % path)
        relpaths.append(mozpath.relpath(absolute, self.topsrcdir))

    repo = None
    if rev:
        if not os.path.exists(os.path.join(self.topsrcdir, '.hg')):
            raise InvalidPathException('a Mercurial repo is required '
                                       'when specifying a revision')
        repo = self.topsrcdir

    # The reader's finder works on absolute paths, so a second finder is
    # needed for wildcard expansion against the source directory.
    finder = FileFinder(self.topsrcdir)
    if repo:
        reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                recognize_repo_paths=True)
    else:
        reader_finder = default_finder

    # Expand wildcards, preserving first-seen order; the set gives cheap
    # membership tests while the list keeps ordering.
    ordered = []
    seen = set()
    for rel in relpaths:
        if '*' not in rel:
            if rel not in seen:
                seen.add(rel)
                ordered.append(rel)
            continue

        if repo:
            raise InvalidPathException('cannot use wildcard in version control mode')

        for match, _ in finder.find(rel):
            if match not in seen:
                seen.add(match)
                ordered.append(match)

    reader = self._get_reader(finder=reader_finder)
    return reader.files_info(ordered)
def _lint_dir(self, path, config, **lintargs):
    """Run the per-file linter over every matching file under `path`."""
    extensions = config.get('extensions')
    if extensions:
        patterns = ['**/*.{}'.format(ext) for ext in extensions]
    else:
        # No extension filter: lint everything.
        patterns = ['**']

    results = []
    finder = FileFinder(path, ignore=lintargs.get('exclude', []))
    for pattern in patterns:
        for relpath, _ in finder.find(pattern):
            results.extend(
                self._lint(os.path.join(path, relpath), config, **lintargs))
    return results
class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
    """Tests for FileFinder: pattern matching and hidden-file handling."""

    def add(self, path):
        """Create a file under the temp dir whose content is its own path."""
        ensure_parent_dir(self.tmppath(path))
        # Use a context manager so the handle is closed deterministically;
        # the original `open(...).write(...)` leaked the file object until
        # garbage collection.
        with open(self.tmppath(path), 'wb') as fh:
            fh.write(path)

    def do_check(self, pattern, result):
        """Assert that `pattern` matches exactly the paths in `result`."""
        if result:
            self.assertTrue(self.finder.contains(pattern))
        else:
            self.assertFalse(self.finder.contains(pattern))
        self.assertEqual(sorted(list(f for f, c in self.finder.find(pattern))),
                         sorted(result))

    def test_file_finder(self):
        self.finder = FileFinder(self.tmpdir)
        self.do_match_test()

        # Hidden (dot) files are reachable when addressed explicitly...
        self.add('foo/.foo')
        self.add('foo/.bar/foo')
        self.assertTrue(self.finder.contains('foo/.foo'))
        self.assertTrue(self.finder.contains('foo/.bar'))
        self.assertTrue('foo/.foo' in [f for f, c in
                                       self.finder.find('foo/.foo')])
        self.assertTrue('foo/.bar/foo' in [f for f, c in
                                           self.finder.find('foo/.bar')])
        self.assertEqual(sorted([f for f, c in self.finder.find('foo/.*')]),
                         ['foo/.bar/foo', 'foo/.foo'])
        # ...but excluded from wildcard matches.
        for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
            self.assertFalse('foo/.foo' in [f for f, c in
                                            self.finder.find(pattern)])
            self.assertFalse('foo/.bar/foo' in [f for f, c in
                                                self.finder.find(pattern)])
            self.assertEqual(sorted([f for f, c in self.finder.find(pattern)]),
                             sorted([f for f, c in self.finder
                                     if mozpack.path.match(f, pattern)]))
def test_add_from_finder(self):
    """Files yielded by a FileFinder can be added to a jar and read back."""
    dest = MockDest()
    with JarWriter(fileobj=dest, optimize=self.optimize) as writer:
        for relpath, fileobj in FileFinder(test_data_path).find('test_data'):
            writer.add('test_data', fileobj)

    reader = JarReader(fileobj=dest)
    entries = [entry for entry in reader]
    self.assertEqual(entries[0].filename, 'test_data')
    self.assertFalse(entries[0].compressed)
    self.assertEqual(entries[0].read(), 'test_data')
def make_archive(archive_name, base, exclude, include, compress):
    """Write a jar archive of files under `base` matching `include` patterns.

    `exclude` patterns are ignored entirely; files matching a `compress`
    pattern are stored compressed. Defaults: include everything, compress
    only **/*.sym.
    """
    finder = FileFinder(base, ignore=exclude)
    include = include or ['*']
    compress = compress or ['**/*.sym']

    archive_basename = os.path.basename(archive_name)
    with open(archive_name, 'wb') as fh:
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for include_pattern in include:
                for relpath, fileobj in finder.find(include_pattern):
                    print(' Adding to "%s":\n\t"%s"' % (archive_basename, relpath))
                    # Compress only when some compress pattern matches.
                    should_compress = any(mozpath.match(relpath, cpat)
                                          for cpat in compress)
                    writer.add(relpath.encode('utf-8'), fileobj,
                               mode=fileobj.mode,
                               compress=should_compress,
                               skip_duplicates=True)
def package_geckolibs_aar(topsrcdir, distdir, appname, output_file):
    """Package Gecko native libraries and assets into an AAR at output_file.

    Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)

    # Static manifest and pre-built classes.jar ship from the source tree.
    srcdir = os.path.join(
        topsrcdir, "mobile", "android", "geckoview_library", "geckolibs")
    for name in ("AndroidManifest.xml", "classes.jar"):
        jarrer.add(name, File(os.path.join(srcdir, name)))

    # Native libraries land under jni/ in the AAR.
    lib_finder = FileFinder(os.path.join(distdir, appname, "lib"))
    for relpath, fileobj in lib_finder.find("**/*.so"):
        jarrer.add(os.path.join("jni", relpath), fileobj)

    # Include the buildinfo JSON as an asset, to give future consumers at
    # least a hope of determining where this AAR came from.
    json_finder = FileFinder(distdir, ignore=["*.mozinfo.json"])
    for relpath, fileobj in json_finder.find("*.json"):
        jarrer.add(os.path.join("assets", relpath), fileobj)

    # This neatly ignores omni.ja.
    asset_finder = FileFinder(os.path.join(distdir, appname, "assets"))
    for relpath, fileobj in asset_finder.find("**/*.so"):
        jarrer.add(os.path.join("assets", relpath), fileobj)

    jarrer.copy(output_file)
    return 0
def package_geckolibs_aar(topsrcdir, distdir, appname, output_file):
    """Package Gecko native libraries and assets into an AAR at output_file.

    Returns 0 on success.
    """
    jarrer = Jarrer(optimize=False)

    srcdir = os.path.join(
        topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckolibs')
    # Static manifest and pre-built classes.jar come from the source tree.
    jarrer.add('AndroidManifest.xml',
               File(os.path.join(srcdir, 'AndroidManifest.xml')))
    jarrer.add('classes.jar', File(os.path.join(srcdir, 'classes.jar')))

    # Native libraries go under jni/ in the AAR.
    jni = FileFinder(os.path.join(distdir, appname, 'lib'))
    for p, f in jni.find('**/*.so'):
        jarrer.add(os.path.join('jni', p), f)

    # Include the buildinfo JSON as an asset, to give future consumers at least
    # a hope of determining where this AAR came from.
    json = FileFinder(distdir, ignore=['*.mozinfo.json'])
    for p, f in json.find('*.json'):
        jarrer.add(os.path.join('assets', p), f)

    # This neatly ignores omni.ja.
    assets = FileFinder(os.path.join(distdir, appname, 'assets'))
    for p, f in assets.find('**/*.so'):
        jarrer.add(os.path.join('assets', p), f)

    jarrer.copy(output_file)
    return 0
def _generate_geckoview_classes_jar(distdir, base_path):
    """Merge every jar under base_path (except gecko-R.jar) into one classes.jar."""
    jar_finder = FileFinder(base_path, ignore=['gecko-R.jar'])

    # Unzip all jar files into $(DISTDIR)/geckoview_aar_classes.
    classes_dir = os.path.join(distdir, 'geckoview_aar_classes')
    shutil.rmtree(classes_dir, ignore_errors=True)
    util.ensureParentDir(classes_dir)
    for _, jar_file in jar_finder.find('*.jar'):
        with zipfile.ZipFile(jar_file.path) as zf:
            zf.extractall(classes_dir)

    # Rezip them into a single classes.jar file.
    classes_jar_path = os.path.join(distdir, 'classes.jar')
    _zipdir(classes_dir, classes_jar_path)
    return File(classes_jar_path)
def process_manifest(destdir, paths, track=None,
                     remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True,
                     defines=None):
    """Install the union of the given install manifests into destdir.

    ``track``, when given, names a tracking file in install-manifest format;
    if it exists, only files it accounts for are candidates for removal,
    and it is rewritten after a successful copy. ``defines`` overrides
    preprocessor defines when populating the registry.

    Returns the result of FileCopier.copy().
    """
    # None sentinel instead of a shared mutable default dict (classic
    # Python pitfall: `defines={}` is one dict shared across calls).
    if defines is None:
        defines = {}

    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir, find_executables=False,
                                find_dotfiles=True)
            for dest in manifest._dests:
                for p, f in finder.find(dest):
                    remove_unaccounted.add(p, dummy_file)
        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories,
    )

    if track:
        manifest.write(path=track)

    return result
def __init__(self, *args, **kargs):
    """Scan a packaged application directory and build an unpacked view.

    Populates self.files (a FileRegistry) with the logical pre-packaging
    layout: omnijar contents are expanded into the directory containing
    the omnijar, .xpi add-ons into a directory named after the xpi, and
    manifests referencing jar: urls are resolved via _handle_manifest_entry.
    self.kind records the detected packaging format ('flat' until an
    omnijar is found, then 'omni').
    """
    FileFinder.__init__(self, *args, **kargs)
    self.files = FileRegistry()  # unpacked view of the package
    self.kind = 'flat'           # packaging format detected so far
    self.omnijar = None          # basename of the omnijar, once detected
    self.jarlogs = {}
    self.optimizedjars = False
    self.compressed = True

    jars = set()  # paths already consumed as jar: targets by manifests

    for p, f in FileFinder.find(self, '*'):
        # Skip the precomplete file, which is generated at packaging time.
        if p == 'precomplete':
            continue
        base = mozpath.dirname(p)
        # If the file is a zip/jar that is not a .xpi, and contains a
        # chrome.manifest, it is an omnijar. All the files it contains
        # go in the directory containing the omnijar. Manifests are merged
        # if there is a corresponding manifest in the directory.
        if not p.endswith('.xpi') and self._maybe_zip(f) and \
                (mozpath.basename(p) == self.omnijar or not self.omnijar):
            jar = self._open_jar(p, f)
            if 'chrome.manifest' in jar:
                self.kind = 'omni'
                self.omnijar = mozpath.basename(p)
                self._fill_with_jar(base, jar)
                continue
        # If the file is a manifest, scan its entries for some referencing
        # jar: urls. If there are some, the files contained in the jar they
        # point to, go under a directory named after the jar.
        if is_manifest(p):
            # Merge into an existing manifest for this path if present.
            m = self.files[p] if self.files.contains(p) \
                else ManifestFile(base)
            for e in parse_manifest(self.base, p, f.open()):
                m.add(self._handle_manifest_entry(e, jars))
            if self.files.contains(p):
                continue
            # Register the merged manifest instead of the raw file.
            f = m
        # If the file is a packed addon, unpack it under a directory named
        # after the xpi.
        if p.endswith('.xpi') and self._maybe_zip(f):
            self._fill_with_jar(p[:-4], self._open_jar(p, f))
            continue
        # Files already consumed as jar: targets are not registered again.
        if not p in jars:
            self.files.add(p, f)
def _generate_python_api_docs(self):
    """Generate Python API doc files."""
    out_dir = os.path.join(self._output_dir, 'staging', 'python')
    base_args = ['sphinx', '--no-toc', '-o', out_dir]

    for package_dir in sorted(self._python_package_dirs):
        abs_dir = os.path.join(self._topsrcdir, package_dir)
        finder = FileFinder(abs_dir, find_executables=False)

        # Skip apidoc generation for any directory whose name ends in "test".
        dirnames = {os.path.dirname(entry[0]) for entry in finder.find('**')}
        excludes = {d for d in dirnames if d.endswith('test')}

        args = base_args + [abs_dir]
        args.extend(excludes)
        sphinx.apidoc.main(args)
def _generate_python_api_docs(self):
    """Generate Python API doc files.

    Runs sphinx apidoc over each registered Python package directory,
    writing output into <docs_dir>/python and excluding any subdirectory
    whose name ends in "test".
    """
    out_dir = os.path.join(self._docs_dir, "python")
    base_args = ["sphinx", "--no-toc", "-o", out_dir]

    for p in sorted(self._python_package_dirs):
        full = os.path.join(self._topsrcdir, p)

        finder = FileFinder(full, find_executables=False)
        # Collect every directory under the package.
        dirs = {os.path.dirname(f[0]) for f in finder.find("**")}
        # Don't generate docs for test directories.
        excludes = {d for d in dirs if d.endswith("test")}

        args = list(base_args)
        args.append(full)
        args.extend(excludes)

        sphinx.apidoc.main(args)
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination path
    is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in PATTERNS:
        # srcdir templates are parameterized on the detected install roots.
        srcdir = entry['srcdir'] % {'vs_path': vs_path, 'sdk_path': sdk_path}
        for file_pattern in entry['files']:
            finder = FileFinder(srcdir, ignore=file_pattern.get('ignore', []))
            for relpath, fileobj in finder.find(file_pattern['pattern']):
                dest = '%s/%s' % (entry['dstdir'], relpath)
                yield dest.encode('utf-8'), fileobj
def main(args):
    """Create a zip/jar archive from the given input paths (patterns allowed)."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-C", metavar='DIR', default=".",
                        help="Change to given directory before considering "
                        "other paths")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+",
                        help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    # Accumulate errors across all inputs rather than stopping at the first.
    with errors.accumulate():
        finder = FileFinder(args.C)
        for pattern in args.input:
            for relpath, fileobj in finder.find(pattern):
                jarrer.add(relpath, fileobj)
        jarrer.copy(mozpath.join(args.C, args.zip))
def package_coverage_data(root, output_file):
    """Archive all .gcno files under root plus a linked-files map into a jar."""
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    jarrer = Jarrer(optimize=False)
    for relpath, fileobj in FileFinder(root).find("**/*.gcno"):
        jarrer.add(relpath, fileobj)

    # Describe the dist/include install manifest and store the resulting
    # map alongside the coverage notes.
    dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                         '_build_manifests',
                                         'install',
                                         'dist_include')
    linked_files = describe_install_manifest(dist_include_manifest,
                                             'dist/include')
    jarrer.add('linked-files-map.json',
               GeneratedFile(json.dumps(linked_files, sort_keys=True)))

    jarrer.copy(output_file)
def __init__(self, config, gyp_dir_attrs, path, output, executor,
             action_overrides, non_unified_sources):
    """Kick off an asynchronous gyp load of the given .gyp file.

    Parsing runs on `executor`; the future is kept in
    self._gyp_loader_future for later retrieval. `action_overrides` and
    `non_unified_sources` are stashed for use when results are processed.
    """
    self._path = path
    self._config = config
    self._output = output
    self._non_unified_sources = non_unified_sources
    self._gyp_dir_attrs = gyp_dir_attrs
    self._action_overrides = action_overrides
    self.execution_time = 0.0
    self._results = []

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)

    if bool(config.substs['_MSC_VER']):
        # This isn't actually used anywhere in this generator, but it's needed
        # to override the registry detection of VC++ in gyp.
        os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
        os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']

    # gyp's load API wants bytes keys.
    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
        b'root_targets': None,
    }

    if gyp_dir_attrs.no_chromium:
        includes = []
        depth = mozpath.dirname(path)
    else:
        depth = chrome_src
        # Files that gyp_chromium always includes
        includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
        finder = FileFinder(chrome_src)
        includes.extend(encode(mozpath.join(chrome_src, name))
                        for name, _ in finder.find('*/supplement.gypi'))

    # Variables from the moz.build gyp_dir_attrs, re-encoded for gyp.
    str_vars = dict((name, encode(value)) for name, value in
                    gyp_dir_attrs.variables.items())
    self._gyp_loader_future = executor.submit(load_gyp, [path], b'mozbuild',
                                              str_vars, includes,
                                              encode(depth), params)
def hash_paths(base_path, patterns):
    """
    Give a list of path patterns, return a digest of the contents of all
    the corresponding files, similarly to git tree objects or mercurial
    manifests.

    Each file is hashed. The list of all hashes and file paths is then
    itself hashed to produce the result.
    """
    finder = FileFinder(base_path)

    # Collect matches first so duplicates across patterns collapse into one
    # entry per path.
    matches = {}
    for pattern in patterns:
        matches.update(finder.find(pattern))

    digest = hashlib.sha256()
    for path in sorted(matches):
        file_hash = _hash_path(mozpath.abspath(mozpath.join(base_path, path)))
        digest.update('{} {}\n'.format(file_hash, mozpath.normsep(path)))
    return digest.hexdigest()