Example #1
    def walk_topsrcdir(self):
        """Read all moz.build files in the source tree.

        This is different from read_topsrcdir() in that this version performs a
        filesystem walk to discover every moz.build file rather than relying on
        data from executed moz.build files to drive traversal.

        This is a generator of Sandbox instances.
        """
        # In the future, we may traverse moz.build files by looking
        # for DIRS references in the AST, even if a directory is added behind
        # a conditional. For now, just walk the filesystem.
        ignore = {
            # Ignore fake moz.build files used for testing moz.build.
            'python/mozbuild/mozbuild/test',

            # Ignore object directories.
            'obj*',
        }

        finder = FileFinder(self.topsrcdir, find_executables=False,
            ignore=ignore)

        for path, f in finder.find('**/moz.build'):
            path = os.path.join(self.topsrcdir, path)
            for s in self.read_mozbuild(path, self.config, descend=False,
                filesystem_absolute=True, read_tiers=True):
                yield s
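
As the docstring above notes, `find()` drives everything here: it yields `(relative_path, BaseFile)` pairs for each match of a `**`-style pattern. A minimal sketch of that contract, with a placeholder source directory:

from mozpack.files import FileFinder

finder = FileFinder('/path/to/topsrcdir', ignore=['obj*'])  # placeholder base
for relpath, f in finder.find('**/moz.build'):
    # relpath is relative to the finder's base; f is a mozpack BaseFile
    # whose contents are available through open().
    contents = f.open().read()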
Example #2
    def _get_files_info(self, paths):
        from mozpack.files import FileFinder

        # Normalize to relative from topsrcdir.
        relpaths = []
        for p in paths:
            a = mozpath.abspath(p)
            if not mozpath.basedir(a, [self.topsrcdir]):
                raise InvalidPathException('path is outside topsrcdir: %s' % p)

            relpaths.append(mozpath.relpath(a, self.topsrcdir))

        finder = FileFinder(self.topsrcdir, find_executables=False)

        # Expand wildcards.
        allpaths = []
        for p in relpaths:
            if '*' not in p:
                if p not in allpaths:
                    allpaths.append(p)
                continue

            for path, f in finder.find(p):
                if path not in allpaths:
                    allpaths.append(path)

        reader = self._get_reader()
        return reader.files_info(allpaths)
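
The normalize-then-expand pattern above can be lifted into a small standalone helper; a sketch, with `topsrcdir` standing in for the real source root:

import mozpack.path as mozpath
from mozpack.files import FileFinder

topsrcdir = '/path/to/topsrcdir'  # placeholder
finder = FileFinder(topsrcdir)

def expand(paths):
    # Mirror _get_files_info(): make each path topsrcdir-relative, expand
    # any '*' pattern through the finder, and keep first occurrences only.
    out = []
    for p in paths:
        rel = mozpath.relpath(mozpath.abspath(p), topsrcdir)
        matches = [rel] if '*' not in rel else [m for m, _ in finder.find(rel)]
        for m in matches:
            if m not in out:
                out.append(m)
    return out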
Example #3
def package_geckoview_aar(topsrcdir, distdir, appname, output_file):
    jarrer = Jarrer(optimize=False)
    app_path = os.path.join(distdir, appname)
    assets = FileFinder(os.path.join(app_path, "assets"), ignore=["*.so"])
    for p, f in assets.find("omni.ja"):
        jarrer.add(os.path.join("assets", p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, "..", "mobile", "android", "base")

    # The resource set is packaged during Fennec's build.
    resjar = JarReader(os.path.join(base_path, "geckoview_resources.zip"))
    for p, f in JarFinder(base_path, resjar).find("*"):
        jarrer.add(os.path.join("res", p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add("classes.jar", classes_jar_file)

    # Add R.txt.
    jarrer.add("R.txt", File(os.path.join(base_path, "R.txt")))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(topsrcdir, "mobile", "android", "geckoview_library", "geckoview")
    jarrer.add("AndroidManifest.xml", File(os.path.join(srcdir, "AndroidManifest.xml")))

    jarrer.copy(output_file)
    return 0
Example #4
def find_files(archive):
    for entry in ARCHIVE_FILES[archive]:
        source = entry['source']
        base = entry.get('base', '')
        pattern = entry.get('pattern')
        patterns = entry.get('patterns', [])
        if pattern:
            patterns.append(pattern)
        dest = entry.get('dest')
        ignore = list(entry.get('ignore', []))
        ignore.append('**/.mkdir.done')
        ignore.append('**/*.pyc')

        common_kwargs = {
            'find_executables': False,
            'find_dotfiles': True,
            'ignore': ignore,
        }

        finder = FileFinder(os.path.join(source, base), **common_kwargs)

        for pattern in patterns:
            for p, f in finder.find(pattern):
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
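
A reduced sketch of the same idea, combining an entry's own ignore list with a couple of global patterns before handing filtering to the finder (the entry dict here is made up for illustration):

import os
from mozpack.files import FileFinder

entry = {'source': '/path/to/objdir', 'base': 'dist/bin',
         'pattern': '**/*.xpt', 'ignore': ['components/*']}  # illustrative entry

ignore = list(entry['ignore']) + ['**/.mkdir.done', '**/*.pyc']
finder = FileFinder(os.path.join(entry['source'], entry['base']),
                    find_dotfiles=True, ignore=ignore)
for p, f in finder.find(entry['pattern']):
    print(p)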
Example #5
def explode(aar, destdir):
    # Take just the support-v4-22.2.1 part.
    name, _ = os.path.splitext(os.path.basename(aar))

    destdir = mozpath.join(destdir, name)
    if os.path.exists(destdir):
        # We always want to start fresh.
        shutil.rmtree(destdir)
    ensureParentDir(destdir)
    with zipfile.ZipFile(aar) as zf:
        zf.extractall(destdir)

    # classes.jar is always present.  However, multiple JAR files with the same
    # name confuses our staged Proguard process in
    # mobile/android/base/Makefile.in, so we make the names unique here.
    classes_jar = mozpath.join(destdir, name + '-classes.jar')
    os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)

    # Embedded JAR libraries are optional.
    finder = FileFinder(mozpath.join(destdir, 'libs'))
    for p, _ in finder.find('*.jar'):
        jar = mozpath.join(finder.base, name + '-' + p)
        os.rename(mozpath.join(finder.base, p), jar)

    # Frequently assets/ is present but empty.  Protect against meaningless
    # changes to the AAR files by deleting empty assets/ directories.
    assets = mozpath.join(destdir, 'assets')
    try:
        os.rmdir(assets)
    except OSError as e:
        if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
            pass
        else:
            raise
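
A hypothetical call, unpacking one Android support AAR next to its siblings (both paths are placeholders):

explode('/path/to/m2repo/support-v4-22.2.1.aar', '/path/to/exploded-aar')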
Example #6
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination
    path is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in VS_PATTERNS:
        finder = FileFinder(vs_path, find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            assert p.startswith(('VC/', 'DIA SDK/'))

            for source, dest in entry.get('rewrite', []):
                p = p.replace(source, dest)

            yield p.encode('utf-8'), f

    for entry in SDK_PATTERNS:
        finder = FileFinder(sdk_path, find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            # We remove the SDK version from the path so we don't have
            # to update other configs when we change the SDK version.
            p = p.replace('/%s/' % SDK_RELEASE, '/')
            relpath = 'SDK/%s' % p

            yield relpath.encode('utf-8'), f
Example #7
def get_generated_sources():
    '''
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    '''
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, 'generated-sources.json')
    with open(gen_sources, 'rb') as f:
        data = json.load(f)
    for f in data['sources']:
        yield f, mozpath.join(buildconfig.topobjdir, f)
    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = 'ipc/ipdl/_ipdlheaders'
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.h'):
        yield mozpath.join(base, p), f
    # Next, return any Rust source files that were generated into the Rust
    # object directory.
    rust_build_kind = 'debug' if buildconfig.substs.get('MOZ_DEBUG_RUST') else 'release'
    base = mozpath.join('toolkit/library',
                        buildconfig.substs['RUST_TARGET'],
                        rust_build_kind,
                        'build')
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.rs'):
        yield mozpath.join(base, p), f
Example #8
def package_geckoview_aar(topsrcdir, distdir, appname, output_file):
    jarrer = Jarrer(optimize=False)
    app_path = os.path.join(distdir, appname)
    assets = FileFinder(os.path.join(app_path, 'assets'), ignore=['*.so'])
    for p, f in assets.find('omni.ja'):
        jarrer.add(os.path.join('assets', p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, '..', 'mobile', 'android', 'base')

    # The resource set is packaged during Fennec's build.
    resjar = JarReader(os.path.join(base_path, 'geckoview_resources.zip'))
    for p, f in JarFinder(base_path, resjar).find('*'):
        jarrer.add(os.path.join('res', p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add('classes.jar', classes_jar_file)

    # Add R.txt.
    jarrer.add('R.txt', File(os.path.join(base_path, 'R.txt')))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckoview')
    jarrer.add('AndroidManifest.xml', File(os.path.join(srcdir, 'AndroidManifest.xml')))

    jarrer.copy(output_file)
    return 0
Example #9
    def all_mozbuild_paths(self):
        """Iterator over all available moz.build files.

        This method has little to do with the reader. It should arguably belong
        elsewhere.
        """
        # In the future, we may traverse moz.build files by looking
        # for DIRS references in the AST, even if a directory is added behind
        # a conditional. For now, just walk the filesystem.
        ignore = {
            # Ignore fake moz.build files used for testing moz.build.
            'python/mozbuild/mozbuild/test',

            # Ignore object directories.
            'obj*',
        }

        finder = FileFinder(self.config.topsrcdir, find_executables=False,
            ignore=ignore)

        # The root doesn't get picked up by FileFinder.
        yield 'moz.build'

        for path, f in finder.find('**/moz.build'):
            yield path
Example #10
    def update_uuids(self, path, interfaces):
        import os
        import xpidl
        from mozpack.files import FileFinder
        import mozpack.path
        from tempfile import mkdtemp

        finder = FileFinder(path, find_executables=False)
        # Avoid creating xpidllex and xpidlyacc in the current directory.
        tmpdir = mkdtemp()
        try:
            parser = xpidl.IDLParser(outputdir=tmpdir)
            registry = InterfaceRegistry()
            for p, f in finder.find('**/*.idl'):
                p = mozpack.path.join(path, p)
                try:
                    content = f.open().read()
                    idl = parser.parse(content, filename=p)
                except Exception:
                    continue
                for prod in idl.productions:
                    if isinstance(prod, xpidl.Interface):
                        registry.add(Interface(p, prod))
        finally:
            import shutil
            shutil.rmtree(tmpdir)

        updates = IDLUpdater(registry)

        for interface in interfaces:
            updates.add(interface)

        updates.update()
Example #11
def distribution_files(root):
    """Find all files suitable for distributing.

    Given the path to generated Sphinx documentation, returns an iterable
    of (path, BaseFile) for files that should be archived, uploaded, etc.
    Paths are relative to given root directory.
    """
    finder = FileFinder(root, ignore=('_staging', '_venv'))
    return finder.find('**')
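
The returned iterable pairs naturally with mozpack's copier; a sketch that stages such a listing into an upload directory, assuming `FileCopier` lives in `mozpack.copier` as in mozilla-central and using placeholder paths:

from mozpack.copier import FileCopier  # assumed import location

copier = FileCopier()
for path, f in distribution_files('/path/to/docs/html'):
    copier.add(path, f)
copier.copy('/path/to/docs/upload')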
Example #12
    def test_get(self):
        self.prepare_match_test()
        finder = FileFinder(self.tmpdir)

        self.assertIsNone(finder.get('does-not-exist'))
        res = finder.get('bar')
        self.assertIsInstance(res, File)
        self.assertEqual(mozpath.normpath(res.path),
                         mozpath.join(self.tmpdir, 'bar'))
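
Outside the test harness, `get()` is the single-path counterpart of `find()`: it returns a `File` (or `None` for a missing entry) instead of iterating matches. A minimal sketch with a placeholder directory:

from mozpack.files import FileFinder

finder = FileFinder('/path/to/some/dir')
f = finder.get('README.txt')   # File instance, or None if the path is absent
if f is not None:
    data = f.open().read()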
Example #13
def package_gcno_tree(root, output_file):
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    finder = FileFinder(root)
    jarrer = Jarrer(optimize=False)
    for p, f in finder.find("**/*.gcno"):
        jarrer.add(p, f)
    jarrer.copy(output_file)
Example #14
    def _get_files_info(self, paths, rev=None):
        from mozbuild.frontend.reader import default_finder
        from mozpack.files import FileFinder, MercurialRevisionFinder

        # Normalize to relative from topsrcdir.
        relpaths = []
        for p in paths:
            a = mozpath.abspath(p)
            if not mozpath.basedir(a, [self.topsrcdir]):
                raise InvalidPathException('path is outside topsrcdir: %s' % p)

            relpaths.append(mozpath.relpath(a, self.topsrcdir))

        repo = None
        if rev:
            hg_path = os.path.join(self.topsrcdir, '.hg')
            if not os.path.exists(hg_path):
                raise InvalidPathException('a Mercurial repo is required '
                        'when specifying a revision')

            repo = self.topsrcdir

        # We need two finders because the reader's finder operates on
        # absolute paths.
        finder = FileFinder(self.topsrcdir)
        if repo:
            reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                    recognize_repo_paths=True)
        else:
            reader_finder = default_finder

        # Expand wildcards.
        # One variable is for ordering. The other for membership tests.
        # (Membership testing on a list can be slow.)
        allpaths = []
        all_paths_set = set()
        for p in relpaths:
            if '*' not in p:
                if p not in all_paths_set:
                    all_paths_set.add(p)
                    allpaths.append(p)
                continue

            if repo:
                raise InvalidPathException('cannot use wildcard in version control mode')

            for path, f in finder.find(p):
                if path not in all_paths_set:
                    all_paths_set.add(path)
                    allpaths.append(path)

        reader = self._get_reader(finder=reader_finder)
        return reader.files_info(allpaths)
Example #15
    def test_add_from_finder(self):
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            finder = FileFinder(test_data_path)
            for p, f in finder.find('test_data'):
                jar.add('test_data', f)

        jar = JarReader(fileobj=s)
        files = [j for j in jar]

        self.assertEqual(files[0].filename, 'test_data')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'test_data')
Example #16
def make_archive(archive_name, base, exclude, include, compress):
    finder = FileFinder(base, ignore=exclude)
    if not include:
        include = ['*']
    if not compress:
        compress = ['**/*.sym']
    archive_basename = os.path.basename(archive_name)
    with open(archive_name, 'wb') as fh:
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for pat in include:
                for p, f in finder.find(pat):
                    print('  Adding to "%s":\n\t"%s"' % (archive_basename, p))
                    should_compress = any(mozpath.match(p, pat) for pat in compress)
                    writer.add(p.encode('utf-8'), f, mode=f.mode,
                               compress=should_compress, skip_duplicates=True)
Example #17
def _generate_geckoview_classes_jar(distdir, base_path):
    base_folder = FileFinder(base_path, ignore=['gecko-R.jar'])

    # Unzip all jar files into $(DISTDIR)/geckoview_aar_classes.
    geckoview_aar_classes_path = os.path.join(distdir, 'geckoview_aar_classes')
    shutil.rmtree(geckoview_aar_classes_path, ignore_errors=True)
    util.ensureParentDir(geckoview_aar_classes_path)

    for p, f in base_folder.find('*.jar'):
        with zipfile.ZipFile(f.path) as zf:
            zf.extractall(geckoview_aar_classes_path)

    # Rezip them into a single classes.jar file.
    classes_jar_path = os.path.join(distdir, 'classes.jar')
    _zipdir(geckoview_aar_classes_path, classes_jar_path)
    return File(classes_jar_path)
Example #18
    def __init__(self, base1, base2, sorted=[], **kargs):
        '''
        Initialize a UnifiedFinder. base1 and base2 are the base directories
        for the two trees from which files are picked. UnifiedFinder.find()
        will act as FileFinder.find() but will error out when matches can only
        be found in one of the two trees and not the other. It will also error
        out if matches can be found on both ends but their contents are not
        identical.

        The sorted argument gives a list of mozpack.path.match patterns. File
        paths matching one of these patterns will have their contents compared
        with their lines sorted.
        '''
        self._base1 = FileFinder(base1, **kargs)
        self._base2 = FileFinder(base2, **kargs)
        self._sorted = sorted
Example #19
    def _handle_manifest_entry(self, entry, jars):
        jarpath = None
        if isinstance(entry, ManifestEntryWithRelPath) and \
                urlparse(entry.relpath).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.relpath)
        elif isinstance(entry, ManifestResource) and \
                urlparse(entry.target).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.target)
        if jarpath:
            # Don't defer unpacking the jar file. If we already saw
            # it, take (and remove) it from the registry. If we
            # haven't, try to find it now.
            if self.files.contains(jarpath):
                jar = self.files[jarpath]
                self.files.remove(jarpath)
            else:
                jar = [f for p, f in FileFinder.find(self, jarpath)]
                assert len(jar) == 1
                jar = jar[0]
            if not jarpath in jars:
                base = mozpack.path.splitext(jarpath)[0]
                for j in self._open_jar(jarpath, jar):
                    self.files.add(mozpack.path.join(base, j.filename),
                                   DeflatedFile(j))
            jars.add(jarpath)
            self.kind = 'jar'
        return entry
Example #20
def process_manifest(
    destdir,
    paths,
    track=None,
    remove_unaccounted=True,
    remove_all_directory_symlinks=True,
    remove_empty_directories=True,
    defines={},
):

    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir, find_executables=False, find_dotfiles=True)
            for dest in manifest._dests:
                for p, f in finder.find(dest):
                    remove_unaccounted.add(p, dummy_file)

        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories,
    )

    if track:
        manifest.write(path=track)

    return result
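
A hypothetical invocation, merging two backend-generated manifests into a staging directory while recording what was installed (all paths are placeholders):

result = process_manifest('/path/to/dist/bin',
                          ['/path/to/manifest-1.in', '/path/to/manifest-2.in'],
                          track='/path/to/dist_bin.track')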
Example #21
    def __init__(self, *args, **kargs):
        FileFinder.__init__(self, *args, **kargs)
        self.files = FileRegistry()
        self.kind = 'flat'
        self.omnijar = None
        self.jarlogs = {}
        self.optimizedjars = False
        self.compressed = True

        jars = set()

        for p, f in FileFinder.find(self, '*'):
            # Skip the precomplete file, which is generated at packaging time.
            if p == 'precomplete':
                continue
            base = mozpath.dirname(p)
            # If the file is a zip/jar that is not a .xpi, and contains a
            # chrome.manifest, it is an omnijar. All the files it contains
            # go in the directory containing the omnijar. Manifests are merged
            # if there is a corresponding manifest in the directory.
            if not p.endswith('.xpi') and self._maybe_zip(f) and \
                    (mozpath.basename(p) == self.omnijar or
                     not self.omnijar):
                jar = self._open_jar(p, f)
                if 'chrome.manifest' in jar:
                    self.kind = 'omni'
                    self.omnijar = mozpath.basename(p)
                    self._fill_with_jar(base, jar)
                    continue
            # If the file is a manifest, scan its entries for some referencing
            # jar: urls. If there are some, the files contained in the jar they
            # point to, go under a directory named after the jar.
            if is_manifest(p):
                m = self.files[p] if self.files.contains(p) \
                    else ManifestFile(base)
                for e in parse_manifest(self.base, p, f.open()):
                    m.add(self._handle_manifest_entry(e, jars))
                if self.files.contains(p):
                    continue
                f = m
            # If the file is a packed addon, unpack it under a directory named
            # after the xpi.
            if p.endswith('.xpi') and self._maybe_zip(f):
                self._fill_with_jar(p[:-4], self._open_jar(p, f))
                continue
            if not p in jars:
                self.files.add(p, f)
Example #22
class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
    def add(self, path):
        ensure_parent_dir(self.tmppath(path))
        open(self.tmppath(path), 'wb').write(path)

    def do_check(self, pattern, result):
        if result:
            self.assertTrue(self.finder.contains(pattern))
        else:
            self.assertFalse(self.finder.contains(pattern))
        self.assertEqual(sorted(list(f for f, c in self.finder.find(pattern))),
                         sorted(result))

    def test_file_finder(self):
        self.finder = FileFinder(self.tmpdir)
        self.do_match_test()
        self.add('foo/.foo')
        self.add('foo/.bar/foo')
        self.assertTrue(self.finder.contains('foo/.foo'))
        self.assertTrue(self.finder.contains('foo/.bar'))
        self.assertTrue('foo/.foo' in [f for f, c in
                                       self.finder.find('foo/.foo')])
        self.assertTrue('foo/.bar/foo' in [f for f, c in
                                           self.finder.find('foo/.bar')])
        self.assertEqual(sorted([f for f, c in self.finder.find('foo/.*')]),
                         ['foo/.bar/foo', 'foo/.foo'])
        for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
            self.assertFalse('foo/.foo' in [f for f, c in
                                            self.finder.find(pattern)])
            self.assertFalse('foo/.bar/foo' in [f for f, c in
                                                self.finder.find(pattern)])
            self.assertEqual(sorted([f for f, c in self.finder.find(pattern)]),
                             sorted([f for f, c in self.finder
                                     if mozpack.path.match(f, pattern)]))
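
The test above captures a useful default: unless the finder is created with `find_dotfiles=True`, hidden entries only show up when a pattern names them explicitly. A quick sketch:

from mozpack.files import FileFinder

finder = FileFinder('/path/to/some/dir')       # placeholder base
visible = list(finder.find('**'))              # dotfiles are skipped
hidden = list(finder.find('foo/.*'))           # hidden entries matched explicitly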
Example #23
    def _generate_python_api_docs(self):
        """Generate Python API doc files."""
        out_dir = os.path.join(self._docs_dir, "python")
        base_args = ["sphinx", "--no-toc", "-o", out_dir]

        for p in sorted(self._python_package_dirs):
            full = os.path.join(self._topsrcdir, p)

            finder = FileFinder(full, find_executables=False)
            dirs = {os.path.dirname(f[0]) for f in finder.find("**")}

            excludes = {d for d in dirs if d.endswith("test")}

            args = list(base_args)
            args.append(full)
            args.extend(excludes)

            sphinx.apidoc.main(args)
Example #24
    def test_ignored_patterns(self):
        """Ignore entries with patterns should be honored."""
        self.prepare_match_test()

        self.add("foo/quxz")

        self.finder = FileFinder(self.tmpdir, ignore=["foo/qux/*"])
        self.do_check("**", ["foo/bar", "foo/baz", "foo/quxz", "bar"])
        self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
Example #25
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination
    path is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in PATTERNS:
        fullpath = entry['srcdir'] % {
            'vs_path': vs_path,
            'sdk_path': sdk_path,
        }
        for pattern in entry['files']:
            finder = FileFinder(fullpath, ignore=pattern.get('ignore', []))
            for p, f in finder.find(pattern['pattern']):
                dstpath = '%s/%s' % (entry['dstdir'], p)
                yield dstpath.encode('utf-8'), f
Example #26
    def test_ignored_patterns(self):
        """Ignore entries with patterns should be honored."""
        self.prepare_match_test()

        self.add('foo/quxz')

        self.finder = FileFinder(self.tmpdir, ignore=['foo/qux/*'])
        self.do_check('**', ['foo/bar', 'foo/baz', 'foo/quxz', 'bar'])
        self.do_check('foo/**', ['foo/bar', 'foo/baz', 'foo/quxz'])
Example #27
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("-C", metavar='DIR', default=".",
                        help="Change to given directory before considering "
                        "other paths")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+",
                        help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    with errors.accumulate():
        finder = FileFinder(args.C)
        for path in args.input:
            for p, f in finder.find(path):
                jarrer.add(p, f)
        jarrer.copy(mozpath.join(args.C, args.zip))
Example #28
    def _generate_python_api_docs(self):
        """Generate Python API doc files."""
        out_dir = os.path.join(self._output_dir, 'staging', 'python')
        base_args = ['sphinx', '--no-toc', '-o', out_dir]

        for p in sorted(self._python_package_dirs):
            full = os.path.join(self._topsrcdir, p)

            finder = FileFinder(full, find_executables=False)
            dirs = {os.path.dirname(f[0]) for f in finder.find('**')}

            excludes = {d for d in dirs if d.endswith('test')}

            args = list(base_args)
            args.append(full)
            args.extend(excludes)

            sphinx.apidoc.main(args)
Example #29
    def _extract(self, src, dest, keep_extra_files=False):
        """extract source distribution into vendor directory"""

        ignore = ()
        if not keep_extra_files:
            ignore = ("*/doc", "*/docs", "*/test", "*/tests")
        finder = FileFinder(src)
        for archive, _ in finder.find("*"):
            _, ext = os.path.splitext(archive)
            archive_path = os.path.join(finder.base, archive)
            if ext == ".whl":
                # Archive is named like "$package-name-1.0-py2.py3-none-any.whl", and should
                # have four dashes that aren't part of the package name.
                package_name, version, spec, abi, platform_and_suffix = archive.rsplit(
                    "-", 4
                )
                target_package_dir = os.path.join(dest, package_name)
                mozfile.remove(target_package_dir)
                os.mkdir(target_package_dir)

                # Extract all the contents of the wheel into the package subdirectory.
                # We're expecting at least a code directory and a ".dist-info" directory,
                # though there may be a ".data" directory as well.
                mozfile.extract(archive_path, target_package_dir, ignore=ignore)
                _denormalize_symlinks(target_package_dir)
            else:
                # Archive is named like "$package-name-1.0.tar.gz", and the rightmost
                # dash should separate the package name from the rest of the archive
                # specifier.
                package_name, archive_postfix = archive.rsplit("-", 1)
                package_dir = os.path.join(dest, package_name)
                mozfile.remove(package_dir)

                # The archive should only contain one top-level directory, which has
                # the source files. We extract this directory directly to
                # the vendor directory.
                extracted_files = mozfile.extract(archive_path, dest, ignore=ignore)
                assert len(extracted_files) == 1
                extracted_package_dir = extracted_files[0]

                # The extracted package dir includes the version in the name,
                # which we don't want.
                mozfile.move(extracted_package_dir, package_dir)
                _denormalize_symlinks(package_dir)
Example #30
    def __init__(self, config, gyp_dir_attrs, path, output, executor,
                 action_overrides, non_unified_sources):
        self._path = path
        self._config = config
        self._output = output
        self._non_unified_sources = non_unified_sources
        self._gyp_dir_attrs = gyp_dir_attrs
        self._action_overrides = action_overrides
        self.execution_time = 0.0
        self._results = []

        # gyp expects plain str instead of unicode. The frontend code gives us
        # unicode strings, so convert them.
        if config.substs['CC_TYPE'] == 'clang-cl':
            # This isn't actually used anywhere in this generator, but it's needed
            # to override the registry detection of VC++ in gyp.
            os.environ.update(ensure_subprocess_env({
                'GYP_MSVS_OVERRIDE_PATH': 'fake_path',
                'GYP_MSVS_VERSION': config.substs['MSVS_VERSION'],
            }))

        params = {
            'parallel': False,
            'generator_flags': {},
            'build_files': [path],
            'root_targets': None,
        }

        if gyp_dir_attrs.no_chromium:
            includes = []
            depth = mozpath.dirname(path)
        else:
            depth = chrome_src
            # Files that gyp_chromium always includes
            includes = [mozpath.join(script_dir, 'gyp_includes',
                                     'common.gypi')]
            finder = FileFinder(chrome_src)
            includes.extend(mozpath.join(chrome_src, name)
                            for name, _ in finder.find('*/supplement.gypi'))

        str_vars = dict(gyp_dir_attrs.variables)
        self._gyp_loader_future = executor.submit(load_gyp, [path], 'mozbuild',
                                                  str_vars, includes,
                                                  depth, params)
Example #31
def process_manifest(destdir, paths, track,
        no_symlinks=False,
        defines={}):

    if os.path.exists(track):
        # We use the same format as install manifests for the tracking
        # data.
        manifest = InstallManifest(path=track)
        remove_unaccounted = FileRegistry()
        dummy_file = BaseFile()

        finder = FileFinder(destdir, find_dotfiles=True)
        for dest in manifest._dests:
            for p, f in finder.find(dest):
                remove_unaccounted.add(p, dummy_file)

        remove_empty_directories=True
        remove_all_directory_symlinks=True

    else:
        # If tracking is enabled and there is no file, we don't want to
        # be removing anything.
        remove_unaccounted = False
        remove_empty_directories=False
        remove_all_directory_symlinks=False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    link_policy = "copy" if no_symlinks else "symlink"
    manifest.populate_registry(
        copier, defines_override=defines, link_policy=link_policy
    )
    result = copier.copy(destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)

    if track:
        manifest.write(path=track)

    return result
Example #32
def global_skipped(config, **lintargs):
    # A global linter that always registers a lint error.  Absence of
    # this error shows that the path exclusion mechanism can cause
    # global lint payloads to not be invoked at all.  In particular,
    # the `extensions` field means that nothing under `files/**` will
    # match.

    finder = FileFinder(lintargs["root"])
    files = [
        mozpath.join(lintargs["root"], p) for p, _ in finder.find("files/**")
    ]

    issues = []
    issues.append(
        result.from_config(config,
                           path=files[0],
                           lineno=1,
                           column=1,
                           rule="not-skipped"))

    return issues
Example #33
def hash_paths(base_path, patterns):
    """
    Given a list of path patterns, return a digest of the contents of all
    the corresponding files, similar to git tree objects or mercurial
    manifests.

    Each file is hashed. The list of all hashes and file paths is then
    itself hashed to produce the result.
    """
    finder = FileFinder(base_path)
    h = hashlib.sha256()
    files = {}
    for pattern in patterns:
        files.update(finder.find(pattern))
    for path in sorted(files.keys()):
        h.update('{} {}\n'.format(
            _hash_path(mozpath.abspath(mozpath.join(base_path, path))),
            mozpath.normsep(path)))
    return h.hexdigest()
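
A hypothetical call, hashing every Python file and YAML config under a directory (the base path and patterns are illustrative):

digest = hash_paths('/path/to/taskcluster', ['**/*.py', 'ci/**/*.yml'])
print(digest)  # 64-character hexadecimal SHA-256 digest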
Example #34
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("-C",
                        metavar='DIR',
                        default=".",
                        help="Change to given directory before considering "
                        "other paths")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+", help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    with errors.accumulate():
        finder = FileFinder(args.C)
        for path in args.input:
            for p, f in finder.find(path):
                jarrer.add(p, f)
        jarrer.copy(mozpath.join(args.C, args.zip))
Example #35
def package_coverage_data(root, output_file):
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    finder = FileFinder(root)
    jarrer = Jarrer()
    for p, f in finder.find("**/*.gcno"):
        jarrer.add(p, f)

    dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                         '_build_manifests',
                                         'install',
                                         'dist_include')
    linked_files = describe_install_manifest(dist_include_manifest,
                                             'dist/include')
    mapping_file = GeneratedFile(json.dumps(linked_files, sort_keys=True))
    jarrer.add('linked-files-map.json', mapping_file)
    jarrer.copy(output_file)
Example #36
    def _generate_python_api_docs(self):
        """Generate Python API doc files."""
        out_dir = os.path.join(self.staging_dir, "python")
        base_args = ["--no-toc", "-o", out_dir]

        for p in sorted(self.python_package_dirs):
            full = os.path.join(self.topsrcdir, p)

            finder = FileFinder(full)
            dirs = {os.path.dirname(f[0]) for f in finder.find("**")}

            test_dirs = {"test", "tests"}
            excludes = {d for d in dirs if set(PurePath(d).parts) & test_dirs}

            args = list(base_args)
            args.append(full)
            args.extend(excludes)

            sphinx.ext.apidoc.main(argv=args)
Example #37
    def python_safety(self, python=None, **kwargs):
        self.logger = commandline.setup_logging(
            "python-safety", {"raw": sys.stdout})

        self.activate_pipenv(pipfile=os.path.join(here, 'Pipfile'), python=python, populate=True)

        pattern = '**/*requirements*.txt'
        path = mozpath.normsep(os.path.dirname(os.path.dirname(here)))
        finder = FileFinder(path)
        files = [os.path.join(path, p) for p, f in finder.find(pattern)]

        return_code = 0

        self.logger.suite_start(tests=files)
        for filepath in files:
            self._run_python_safety(filepath)

        self.logger.suite_end()
        return return_code
Example #38
    def find_all_specifications(self):
        """Searches the source tree for all specification files
        and returns them as a list."""
        specifications = []
        inclusions = [
            "netwerk/test/unit",
            "security/manager/ssl/tests",
            "services/settings/test/unit/test_remote_settings_signatures",
            "testing/xpcshell/moz-http2",
        ]
        exclusions = ["security/manager/ssl/tests/unit/test_signed_apps"]
        finder = FileFinder(self.topsrcdir)
        for inclusion_path in inclusions:
            for f, _ in finder.find(inclusion_path):
                if basedir(f, exclusions):
                    continue
                if is_specification_file(f):
                    specifications.append(os.path.join(self.topsrcdir, f))
        return specifications
Example #39
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("--base-dir",
                        default=os.path.join(buildconfig.topobjdir, "dist",
                                             "bin"),
                        help="Store paths relative to this directory")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+", help="Path to files to add to zip")
    args = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    with errors.accumulate():
        finder = FileFinder(args.base_dir)
        for i in args.input:
            path = mozpath.relpath(i, args.base_dir)
            for p, f in finder.find(path):
                jarrer.add(p, f)
        jarrer.copy(args.zip)
Example #40
    def __init__(self, *args, **kwargs):
        super(TestManifestLoader, self).__init__(*args, **kwargs)
        self.finder = FileFinder(self.topsrcdir)
        self.reader = self.mozbuild_reader(config_mode="empty")
        self.variables = {
            '{}_MANIFESTS'.format(k): v[0] for k, v in six.iteritems(TEST_MANIFESTS)
        }
        self.variables.update({
            '{}_MANIFESTS'.format(f.upper()): f for f in REFTEST_FLAVORS
        })
Example #41
def main():
    parser = argparse.ArgumentParser(
        description="Merge two crashreporter symbols directories."
    )
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")

    options = parser.parse_args()

    dir1_finder = FileFinder(options.dir1)
    dir2_finder = FileFinder(options.dir2)
    finder = UnifiedSymbolsFinder(dir1_finder, dir2_finder)

    copier = FileCopier()
    with errors.accumulate():
        for p, f in finder:
            copier.add(p, f)

    copier.copy(options.dir1, skip_if_older=False)
Example #42
def invalidate(cache, root):
    if not os.path.isfile(cache):
        return

    tc_dir = os.path.join(root, 'taskcluster')
    tmod = max(os.path.getmtime(os.path.join(tc_dir, p)) for p, _ in FileFinder(tc_dir))
    cmod = os.path.getmtime(cache)

    if tmod > cmod:
        os.remove(cache)
Example #43
    def __init__(self, config, gyp_dir_attrs, path, output, executor,
                 action_overrides, non_unified_sources):
        self._path = path
        self._config = config
        self._output = output
        self._non_unified_sources = non_unified_sources
        self._gyp_dir_attrs = gyp_dir_attrs
        self._action_overrides = action_overrides
        self.execution_time = 0.0
        self._results = []

        # gyp expects plain str instead of unicode. The frontend code gives us
        # unicode strings, so convert them.
        path = encode(path)
        if bool(config.substs['_MSC_VER']):
            # This isn't actually used anywhere in this generator, but it's needed
            # to override the registry detection of VC++ in gyp.
            os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
            os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']

        params = {
            b'parallel': False,
            b'generator_flags': {},
            b'build_files': [path],
            b'root_targets': None,
        }

        if gyp_dir_attrs.no_chromium:
            includes = []
            depth = mozpath.dirname(path)
        else:
            depth = chrome_src
            # Files that gyp_chromium always includes
            includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
            finder = FileFinder(chrome_src)
            includes.extend(encode(mozpath.join(chrome_src, name))
                            for name, _ in finder.find('*/supplement.gypi'))

        str_vars = dict((name, encode(value)) for name, value in
                        gyp_dir_attrs.variables.items())
        self._gyp_loader_future = executor.submit(load_gyp, [path], b'mozbuild',
                                                  str_vars, includes,
                                                  encode(depth), params)
Example #44
def expand_exclusions(paths, config, root):
    """Returns all files that match patterns and aren't excluded.

    This is used by some external linters that receive 'batch' files (e.g.
    directories) but aren't capable of applying their own exclusions. There
    is an argument to be made that this step should just apply to all
    linters no matter what.

    Args:
        paths (list): List of candidate paths to lint.
        config (dict): Linter's config object.
        root (str): Root of the repository.

    Returns:
        Generator yielding paths that weren't excluded.
    """
    extensions = [e.lstrip('.') for e in config['extensions']]

    def normalize(path):
        path = mozpath.normpath(path)
        if os.path.isabs(path):
            return path
        return mozpath.join(root, path)

    exclude = map(normalize, config.get('exclude', []))
    for path in paths:
        path = mozpath.normsep(path)
        if os.path.isfile(path):
            if not any(path.startswith(e) for e in exclude):
                yield path
            continue

        ignore = [
            e[len(path):].lstrip('/') for e in exclude
            if mozpath.commonprefix((path, e)) == path
        ]
        finder = FileFinder(path, ignore=ignore)

        _, ext = os.path.splitext(path)
        ext.lstrip('.')

        for ext in extensions:
            for p, f in finder.find("**/*.{}".format(ext)):
                yield os.path.join(path, p)
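
A hypothetical invocation, with a minimal linter config supplying the extensions and exclusions that the generator relies on:

config = {'extensions': ['py'], 'exclude': ['third_party']}  # illustrative config
root = '/path/to/root'                                       # placeholder
for path in expand_exclusions([root + '/python/mozbuild'], config, root):
    print(path)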
Example #45
def package_coverage_data(root, output_file):
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    finder = FileFinder(root)
    jarrer = Jarrer(optimize=False)
    for p, f in finder.find("**/*.gcno"):
        jarrer.add(p, f)

    dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                         '_build_manifests',
                                         'install',
                                         'dist_include')
    linked_files = describe_install_manifest(dist_include_manifest,
                                             'dist/include')
    mapping_file = GeneratedFile(json.dumps(linked_files, sort_keys=True))
    jarrer.add('linked-files-map.json', mapping_file)
    jarrer.copy(output_file)
Example #46
def make_archive(archive_name, base, exclude, include, compress):
    finder = FileFinder(base, ignore=exclude)
    if not include:
        include = ['*']
    if not compress:
        compress = ['**/*.sym']
    archive_basename = os.path.basename(archive_name)
    with open(archive_name, 'wb') as fh:
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for pat in include:
                for p, f in finder.find(pat):
                    print('  Adding to "%s":\n\t"%s"' % (archive_basename, p))
                    should_compress = any(
                        mozpath.match(p, pat) for pat in compress)
                    writer.add(p.encode('utf-8'),
                               f,
                               mode=f.mode,
                               compress=should_compress,
                               skip_duplicates=True)
Example #47
def hash_paths(base_path, patterns):
    """
    Given a list of path patterns, return a digest of the contents of all
    the corresponding files, similar to git tree objects or mercurial
    manifests.

    Each file is hashed. The list of all hashes and file paths is then
    itself hashed to produce the result.
    """
    finder = FileFinder(base_path)
    h = hashlib.sha256()
    files = {}
    for pattern in patterns:
        files.update(finder.find(pattern))
    for path in sorted(files.keys()):
        h.update('{} {}\n'.format(
            _hash_path(mozpath.abspath(mozpath.join(base_path, path))),
            mozpath.normsep(path)
        ))
    return h.hexdigest()
Example #48
    def __init__(self, *args, **kargs):
        FileFinder.__init__(self, *args, **kargs)
        self.files = FileRegistry()
        self.kind = 'flat'
        self.omnijar = None
        self.jarlogs = {}
        self.optimizedjars = False

        jars = set()

        for p, f in FileFinder.find(self, '*'):
            # Skip the precomplete file, which is generated at packaging time.
            if p == 'precomplete':
                continue
            base = mozpack.path.dirname(p)
            # If the file is a zip/jar that is not a .xpi, and contains a
            # chrome.manifest, it is an omnijar. All the files it contains
            # go in the directory containing the omnijar. Manifests are merged
            # if there is a corresponding manifest in the directory.
            if not p.endswith('.xpi') and self._maybe_zip(f) and \
                    (mozpack.path.basename(p) == self.omnijar or
                     not self.omnijar):
                jar = self._open_jar(p, f)
                if 'chrome.manifest' in jar:
                    self.kind = 'omni'
                    self.omnijar = mozpack.path.basename(p)
                    self._fill_with_omnijar(base, jar)
                    continue
            # If the file is a manifest, scan its entries for some referencing
            # jar: urls. If there are some, the files contained in the jar they
            # point to, go under a directory named after the jar.
            if is_manifest(p):
                m = self.files[p] if self.files.contains(p) \
                    else ManifestFile(base)
                for e in parse_manifest(self.base, p, f.open()):
                    m.add(self._handle_manifest_entry(e, jars))
                if self.files.contains(p):
                    continue
                f = m
            if not p in jars:
                self.files.add(p, f)
Example #49
def run_linter(python, paths, config, **lintargs):
    binary = find_executable(python)
    if not binary:
        # TODO bootstrap python3 if not available
        print(
            'error: {} not detected, aborting py-compat check'.format(python))
        if 'MOZ_AUTOMATION' in os.environ:
            return 1
        return []

    root = lintargs['root']
    pattern = "**/*.py"
    exclude = [mozpath.join(root, e) for e in lintargs.get('exclude', [])]
    files = []
    for path in paths:
        path = mozpath.normsep(path)
        if os.path.isfile(path):
            files.append(path)
            continue

        ignore = [
            e[len(path):].lstrip('/') for e in exclude
            if mozpath.commonprefix((path, e)) == path
        ]
        finder = FileFinder(path, ignore=ignore)
        files.extend([os.path.join(path, p) for p, f in finder.find(pattern)])

    with tempfile.NamedTemporaryFile(mode='w') as fh:
        fh.write('\n'.join(files))
        fh.flush()

        cmd = [binary, os.path.join(here, 'check_compat.py'), fh.name]

        proc = PyCompatProcess(config, cmd)
        proc.run()
        try:
            proc.wait()
        except KeyboardInterrupt:
            proc.kill()

    return results
Example #50
    def test_ignored_files(self):
        """Ignored files should not have results returned."""
        self.prepare_match_test()

        # Be sure prefix match doesn't get ignored.
        self.add('barz')

        self.finder = FileFinder(self.tmpdir, ignore=['foo/bar', 'bar'])
        self.do_check('**', ['barz', 'foo/baz', 'foo/qux/1', 'foo/qux/2/test',
            'foo/qux/2/test2', 'foo/qux/bar'])
        self.do_check('foo/**', ['foo/baz', 'foo/qux/1', 'foo/qux/2/test',
            'foo/qux/2/test2', 'foo/qux/bar'])
Example #51
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination
    path is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in VS_PATTERNS:
        finder = FileFinder(vs_path, ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            assert p.startswith(('VC/', 'DIA SDK/'))

            yield p.encode('utf-8'), f

    for entry in SDK_PATTERNS:
        finder = FileFinder(sdk_path, ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            relpath = 'SDK/%s' % p

            yield relpath.encode('utf-8'), f
Example #52
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination
    path is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in VS_PATTERNS:
        finder = FileFinder(vs_path,
                            find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            assert p.startswith(('VC/', 'DIA SDK/'))

            for source, dest in entry.get('rewrite', []):
                p = p.replace(source, dest)

            yield p.encode('utf-8'), f

    for entry in SDK_PATTERNS:
        finder = FileFinder(sdk_path,
                            find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            # We remove the SDK version from the path so we don't have
            # to update other configs when we change the SDK version.
            p = p.replace('/%s/' % SDK_RELEASE, '/')
            relpath = 'SDK/%s' % p

            yield relpath.encode('utf-8'), f
Example #53
    def _extract(self, src, dest, keep_extra_files=False):
        """extract source distribution into vendor directory"""

        ignore = ()
        if not keep_extra_files:
            ignore = (
                "*/doc",
                "*/docs",
                "*/test",
                "*/tests",
            )
        finder = FileFinder(src)
        for path, _ in finder.find("*"):
            base, ext = os.path.splitext(path)
            # packages extract into package-version directory name and we strip the version
            tld = mozfile.extract(os.path.join(finder.base, path),
                                  dest,
                                  ignore=ignore)[0]
            target = os.path.join(dest, tld.rpartition("-")[0])
            mozfile.remove(
                target)  # remove existing version of vendored package
            mozfile.move(tld, target)

            # If any files inside the vendored package were symlinks, turn them into normal files
            # because hg.mozilla.org forbids symlinks in the repository.
            link_finder = FileFinder(target)
            for _, f in link_finder.find("**"):
                if os.path.islink(f.path):
                    link_target = os.path.realpath(f.path)
                    os.unlink(f.path)
                    shutil.copyfile(link_target, f.path)
Example #54
def resolve_files():
    """Resolve the files that constitute a standalone toolchain.

    This is a generator of (dest path, file) where the destination
    path is relative and the file instance is a BaseFile from mozpack.
    """
    vs_path, sdk_path = find_vs_paths()

    for entry in VS_PATTERNS:
        finder = FileFinder(vs_path,
                            find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            assert p.startswith(('VC/', 'DIA SDK/'))

            yield p.encode('utf-8'), f

    for entry in SDK_PATTERNS:
        finder = FileFinder(sdk_path,
                            find_executables=False,
                            ignore=entry.get('ignore', []))
        for p, f in finder.find(entry['pattern']):
            relpath = 'SDK/%s' % p

            yield relpath.encode('utf-8'), f
Example #55
def run_linter(python, paths, config, **lintargs):
    binary = find_executable(python)
    if not binary:
        # If we're in automation, this is fatal. Otherwise, the warning in the
        # setup method was already printed.
        if 'MOZ_AUTOMATION' in os.environ:
            return 1
        return []

    root = lintargs['root']
    pattern = "**/*.py"
    exclude = [mozpath.join(root, e) for e in lintargs.get('exclude', [])]
    files = []
    for path in paths:
        path = mozpath.normsep(path)
        if os.path.isfile(path):
            files.append(path)
            continue

        ignore = [
            e[len(path):].lstrip('/') for e in exclude
            if mozpath.commonprefix((path, e)) == path
        ]
        finder = FileFinder(path, ignore=ignore)
        files.extend([os.path.join(path, p) for p, f in finder.find(pattern)])

    with mozfile.NamedTemporaryFile(mode='w') as fh:
        fh.write('\n'.join(files))
        fh.flush()

        cmd = [binary, os.path.join(here, 'check_compat.py'), fh.name]

        proc = PyCompatProcess(config, cmd)
        proc.run()
        try:
            proc.wait()
        except KeyboardInterrupt:
            proc.kill()

    return results
Example #56
    def _extract(self, src, dest):
        """extract source distribution into vendor directory"""
        finder = FileFinder(src)
        for path, _ in finder.find('*'):
            base, ext = os.path.splitext(path)
            if ext == '.whl':
                # Wheels would extract into a directory with the name of the package, but
                # we want the platform signifiers, minus the version number.
                # Wheel filenames look like:
                # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}
                bits = base.split('-')

                # Remove the version number.
                bits.pop(1)
                target = os.path.join(dest, '-'.join(bits))
                mozfile.remove(
                    target)  # remove existing version of vendored package
                os.mkdir(target)
                mozfile.extract(os.path.join(finder.base, path), target)
            else:
                # packages extract into package-version directory name and we strip the version
                tld = mozfile.extract(os.path.join(finder.base, path), dest)[0]
                target = os.path.join(dest, tld.rpartition('-')[0])
                mozfile.remove(
                    target)  # remove existing version of vendored package
                mozfile.move(tld, target)
            # If any files inside the vendored package were symlinks, turn them into normal files
            # because hg.mozilla.org forbids symlinks in the repository.
            link_finder = FileFinder(target)
            for _, f in link_finder.find('**'):
                if os.path.islink(f.path):
                    link_target = os.path.realpath(f.path)
                    os.unlink(f.path)
                    shutil.copyfile(link_target, f.path)
Example #57
def get_generated_sources():
    '''
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    '''
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, 'generated-sources.json')
    with open(gen_sources, 'rb') as f:
        data = json.load(f)
    for f in data['sources']:
        yield f, mozpath.join(buildconfig.topobjdir, f)
    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = 'ipc/ipdl/_ipdlheaders'
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.h'):
        yield mozpath.join(base, p), f
    # Next, return any Rust source files that were generated into the Rust
    # object directory.
    rust_build_kind = 'debug' if buildconfig.substs.get(
        'MOZ_DEBUG_RUST') else 'release'
    base = mozpath.join(buildconfig.substs['RUST_TARGET'], rust_build_kind,
                        'build')
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find('**/*.rs'):
        yield mozpath.join(base, p), f
Example #58
    def add_names(names, defaults={}):
        for name in names:
            site = copy.deepcopy(defaults)
            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name), required_keys=('title', 'url', 'bgcolor')))
            site['imageurl'] = image_url_template.format(name=name)
            sites.append(site)

            # Now check for existence of an appropriately named drawable.  If none
            # exists, throw.  This stops a locale discovering, at runtime, that the
            # corresponding drawable was not added to en-US.
            if not opts.resources:
                continue
            resources = os.path.abspath(opts.resources)
            finder = FileFinder(resources)
            matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
            if not matches:
                raise Exception("Could not find drawable in '{resources}' for '{name}'"
                    .format(resources=resources, name=name))
            else:
                if opts.verbose:
                    print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                          .format(len=len(matches), resources=resources, name=name, matches=matches))
Example #59
def get_application_ini_values(finder_or_application_directory, *args):
    """Find multiple strings for given `section` and `value` pairs.
    Additional `args` should be dictionaries with keys `section`, `value`,
    and optional `fallback`.  Returns an iterable of strings, one for each
    dictionary provided.

    `fallback` is treated as with `get_application_ini_value`.

    Raises an `Exception` if any string is not found."""

    if isinstance(finder_or_application_directory, string_types):
        finder = FileFinder(finder_or_application_directory)
    else:
        finder = finder_or_application_directory

    # Packages usually have a top-level `firefox/` directory; search below it.
    for p, f in finder.find("**/application.ini"):
        data = f.open().read().decode("utf-8")
        parser = configparser.ConfigParser()
        parser.read_string(data)

        for d in args:
            rc = None
            try:
                rc = parser.get(d["section"], d["value"])
            except configparser.NoOptionError:
                if "fallback" not in d:
                    raise
                else:
                    rc = parser.get(d["section"], d["fallback"])

            if rc is None:
                raise Exception(
                    "Input does not contain an application.ini file")

            yield rc

        # Process only the first `application.ini`.
        break
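
A hypothetical call against an unpacked package directory, reading the application name and falling back to a second key for the build ID (the directory and the fallback key are illustrative):

values = get_application_ini_values(
    '/path/to/unpacked-package',
    dict(section='App', value='Name'),
    dict(section='App', value='BuildID', fallback='SourceStamp'),
)
name, buildid = list(values)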