Пример #1
0
    def process_tests_artifact(self, filename, processed_filename):
        """Repackage a downloaded tests archive into a processed archive.

        Entries matching ``self.test_artifact_patterns`` are rebased from
        their source prefix onto the corresponding destination prefix;
        entries matching OBJDIR_TEST_FILES specs are placed under
        ``../<base>/``.

        Raises ValueError if no entry matched any configured pattern, which
        indicates the upstream archive layout changed.
        """
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            reader = JarReader(filename)
            # Use a distinct loop variable instead of shadowing the
            # `filename` parameter, and .items() (valid on Python 2 and 3)
            # instead of the Python-2-only .iteritems().
            for entry_name, entry in reader.entries.items():
                for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                    if not mozpath.match(entry_name, pattern):
                        continue
                    destpath = mozpath.relpath(entry_name, src_prefix)
                    destpath = mozpath.join(dest_prefix, destpath)
                    self.log(logging.INFO, 'artifact',
                             {'destpath': destpath},
                             'Adding {destpath} to processed archive')
                    # The upper 16 bits of a zip entry's external_attr hold
                    # the Unix permission bits.
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)
                    added_entry = True
                    break
                for files_entry in OBJDIR_TEST_FILES.values():
                    origin_pattern = files_entry['pattern']
                    leaf_filename = entry_name
                    if 'dest' in files_entry:
                        dest = files_entry['dest']
                        origin_pattern = mozpath.join(dest, origin_pattern)
                        leaf_filename = entry_name[len(dest) + 1:]
                    if mozpath.match(entry_name, origin_pattern):
                        destpath = mozpath.join('..', files_entry['base'], leaf_filename)
                        mode = entry['external_attr'] >> 16
                        writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)

        if not added_entry:
            # Fixed: the two fragments previously concatenated without a
            # separating space ('..."{patterns}"matched...').
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=LinuxArtifactJob.test_artifact_patterns))
Пример #2
0
    def _find_glob(self, base, pattern):
        """
        Actual implementation of FileFinder.find() when the given pattern
        contains globbing patterns ('*' or '**'). This is meant to be an
        equivalent of:
            for p, f in self:
                if mozpath.match(p, pattern):
                    yield p, f
        but avoids scanning the entire tree.
        """
        # `pattern` is a pre-split list of path components (consumed one
        # component per recursion step); `base` is the directory walked so far.
        if not pattern:
            # Pattern exhausted: everything under `base` matches.
            for p, f in self._find(base):
                yield p, f
        elif pattern[0] == "**":
            # '**' may span any number of directories, so fall back to
            # scanning the whole subtree and matching against the full
            # remaining pattern.
            for p, f in self._find(base):
                if mozpath.match(p, mozpath.join(*pattern)):
                    yield p, f
        elif "*" in pattern[0]:
            # Single-component glob: enumerate `base`'s entries and recurse
            # into those matching the leading component.
            if not os.path.exists(os.path.join(self.base, base)):
                return

            # A base that matches an ignore pattern prunes the whole subtree.
            for p in self.ignore:
                if mozpath.match(base, p):
                    return

            # See above comment w.r.t. sorted() and idempotent behavior.
            for p in sorted(os.listdir(os.path.join(self.base, base))):
                # Hidden entries only match when the glob component itself
                # starts with a '.'.
                if p.startswith(".") and not pattern[0].startswith("."):
                    continue
                if mozpath.match(p, pattern[0]):
                    for p_, f in self._find_glob(mozpath.join(base, p), pattern[1:]):
                        yield p_, f
        else:
            # Literal leading component: descend directly without listing.
            for p, f in self._find_glob(mozpath.join(base, pattern[0]), pattern[1:]):
                yield p, f
Пример #3
0
    def process_package_artifact(self, filename, processed_filename):
        """Copy interesting files from the downloaded package tarball into a
        processed jar, rebasing them from "firefox/..." onto "bin/...".

        Raises ValueError when nothing matched, signalling that the archive
        layout changed upstream.
        """
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for member in reader:
                    if not member.isfile():
                        continue

                    matched = any(mozpath.match(member.name, pattern)
                                  for pattern in self.package_artifact_patterns)
                    if not matched:
                        continue

                    # We strip off the relative "firefox/" bit from the path,
                    # but otherwise preserve it.
                    destpath = mozpath.join('bin',
                                            mozpath.relpath(member.name, "firefox"))
                    self.log(logging.INFO, 'artifact',
                             {'destpath': destpath},
                             'Adding {destpath} to processed archive')
                    writer.add(destpath.encode('utf-8'),
                               reader.extractfile(member), mode=member.mode)
                    added_entry = True

        if not added_entry:
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=LinuxArtifactJob.package_artifact_patterns))
Пример #4
0
 def is_resource(self, path, base=None):
     '''
     Return whether the given path corresponds to a resource to be put in an
     omnijar archive.
     '''
     if base is None:
         base = self._get_base(path)
     path = mozpath.relpath(path, base)
     # Explicit non-resource patterns override every rule below.
     for spec in self._non_resources:
         if mozpath.match(path, spec.replace('*', '**')):
             return False
     parts = mozpath.split(path)
     head = parts[0]
     if head == 'chrome':
         # Everything under chrome/ except chrome/icons.
         return len(parts) == 1 or parts[1] != 'icons'
     if head == 'components':
         # Only scripts and typelibs from components/.
         return parts[-1].endswith(('.js', '.xpt'))
     if head == 'res':
         # res/ except cursors and the main menu nib.
         return len(parts) == 1 or parts[1] not in ('cursors', 'MainMenu.nib')
     if head == 'defaults':
         # Exclude defaults/pref*/channel-prefs.js specifically.
         return not (len(parts) == 3 and
                     parts[2] == 'channel-prefs.js' and
                     parts[1] in ['pref', 'preferences'])
     return head in [
         'modules',
         'greprefs.js',
         'hyphenation',
         'update.locale',
     ] or head in STARTUP_CACHE_PATHS
Пример #5
0
 def match(self, patterns):
     """Return True if this path matches any of the given patterns.

     Patterns may be plain strings or FilterPath instances.
     """
     normalized = mozpath.normsep(self.path)

     def pattern_text(p):
         # Unwrap FilterPath objects to their underlying path string.
         return p.path if isinstance(p, FilterPath) else p

     return any(mozpath.match(normalized, mozpath.normsep(pattern_text(p)))
                for p in patterns)
Пример #6
0
    def get(self, path):
        """Return a BaseFile for ``path`` under ``self.base``.

        Returns None when the file does not exist or matches an ignore
        pattern. Executables are wrapped in ExecutableFile when
        ``find_executables`` is enabled.
        """
        srcpath = os.path.join(self.base, path)
        # lexists: a dangling symlink still counts as present.
        if not os.path.lexists(srcpath):
            return None

        if any(mozpath.match(path, pattern) for pattern in self.ignore):
            return None

        if self.find_executables and is_executable(srcpath):
            return ExecutableFile(srcpath)
        return File(srcpath)
Пример #7
0
def make_archive(archive_name, base, exclude, include, compress):
    """Create a jar archive at ``archive_name`` from files under ``base``.

    ``include``/``exclude`` are match patterns; ``compress`` patterns select
    which entries get deflated (defaulting to symbol files).
    """
    finder = FileFinder(base, ignore=exclude)
    include = include or ['*']
    compress = compress or ['**/*.sym']
    archive_basename = os.path.basename(archive_name)
    with open(archive_name, 'wb') as fh:
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for include_pattern in include:
                for p, f in finder.find(include_pattern):
                    print('  Adding to "%s":\n\t"%s"' % (archive_basename, p))
                    should_compress = any(mozpath.match(p, c) for c in compress)
                    writer.add(p.encode('utf-8'), f, mode=f.mode,
                               compress=should_compress, skip_duplicates=True)
Пример #8
0
 def unify_file(self, path, file1, file2):
     '''
     Given two BaseFiles and the path they were found at, check whether
     their content match and return the first BaseFile if they do.
     '''
     lines1 = file1.open().readlines()
     lines2 = file2.open().readlines()
     if lines1 == lines2:
         return file1
     # For paths matching one of the configured patterns, a mere reordering
     # of lines still counts as matching content.
     matched_pattern = next((p for p in self._sorted
                             if mozpath.match(path, p)), None)
     if matched_pattern is not None and sorted(lines1) == sorted(lines2):
         return file1
     return None
Пример #9
0
 def match(self, pattern):
     '''
     Return the list of paths, stored in the container, matching the
     given pattern. See the mozpack.path.match documentation for a
     description of the handled patterns.
     '''
     # Wildcard pattern: filter every stored path through mozpath.match.
     if '*' in pattern:
         return [stored for stored in self.paths()
                 if mozpath.match(stored, pattern)]
     # The empty pattern selects everything.
     if pattern == '':
         return self.paths()
     # An exact, known file path matches just itself.
     if pattern in self._files:
         return [pattern]
     # Otherwise treat the pattern as a directory prefix.
     return [stored for stored in self.paths()
             if mozpath.basedir(stored, [pattern]) == pattern]
Пример #10
0
    def _find_file(self, path):
        '''
        Actual implementation of FileFinder.find() when the given pattern
        corresponds to an existing file under the base directory.
        '''
        srcpath = os.path.join(self.base, path)
        if not os.path.exists(srcpath):
            return

        if any(mozpath.match(path, ignore_pattern)
               for ignore_pattern in self.ignore):
            return

        if self.find_executables and is_executable(srcpath):
            found = ExecutableFile(srcpath)
        else:
            found = File(srcpath)
        yield path, found
Пример #11
0
    def process_package_artifact(self, filename, processed_filename):
        """Extract matching entries from the downloaded package jar into a
        flat processed archive keyed by basename.

        Raises ValueError when no entry matched, signalling an upstream
        archive-layout change.
        """
        added_entry = False
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for f in JarReader(filename):
                if not any(mozpath.match(f.filename, p) for p in self.package_artifact_patterns):
                    continue

                basename = mozpath.basename(f.filename)
                self.log(logging.INFO, "artifact", {"basename": basename}, "Adding {basename} to processed archive")
                writer.add(basename.encode("utf-8"), f)
                added_entry = True

        if not added_entry:
            # Fixed: the fragments previously concatenated without a space
            # ('..."{patterns}"matched...').
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(patterns=self.artifact_patterns)
            )
Пример #12
0
 def do_finder_test(self, finder):
     """Exercise a finder's contains()/find() behavior around dotfiles.

     Dotfiles must be reachable via explicit dotted patterns but must never
     leak into generic (non-dotted) pattern results.
     """
     self.assertTrue(finder.contains('foo/.foo'))
     self.assertTrue(finder.contains('foo/.bar'))
     self.assertTrue('foo/.foo' in [f for f, c in
                                    finder.find('foo/.foo')])
     self.assertTrue('foo/.bar/foo' in [f for f, c in
                                        finder.find('foo/.bar')])
     self.assertEqual(sorted([f for f, c in finder.find('foo/.*')]),
                      ['foo/.bar/foo', 'foo/.foo'])
     # Generic patterns must exclude dotfiles, and find() must agree with
     # full iteration filtered through mozpath.match.
     for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
         self.assertFalse('foo/.foo' in [f for f, c in
                                         finder.find(pattern)])
         self.assertFalse('foo/.bar/foo' in [f for f, c in
                                             finder.find(pattern)])
         self.assertEqual(sorted([f for f, c in finder.find(pattern)]),
                          sorted([f for f, c in finder
                                  if mozpath.match(f, pattern)]))
Пример #13
0
    def process_artifact(self, filename, processed_filename):
        """Repackage an Android artifact: matching entries are written into
        the processed jar with .so libraries at bin/ root and other files
        under bin/<dir> (minus any leading "assets/" component).
        """
        # Extract all .so files into the root, which will get copied into dist/bin.
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                    continue

                dirname, basename = os.path.split(p)
                self.log(logging.INFO, 'artifact',
                         {'basename': basename},
                         'Adding {basename} to processed archive')

                basedir = 'bin'
                if not basename.endswith('.so'):
                    # Fixed: str.lstrip('assets/') strips any run of the
                    # characters {a, s, e, t, /} from the front (mangling
                    # names like "assets/subdir" -> "ubdir"); strip the
                    # literal "assets/" prefix instead.
                    if dirname.startswith('assets/'):
                        dirname = dirname[len('assets/'):]
                    basedir = mozpath.join('bin', dirname)
                basename = mozpath.join(basedir, basename)
                writer.add(basename.encode('utf-8'), f.open())
Пример #14
0
 def _find(self, pattern):
     '''
     Actual implementation of JarFinder.find(), dispatching to specialized
     member functions depending on what kind of pattern was given.
     '''
     files = self._files
     if '*' in pattern:
         # Wildcard: scan every stored path against the pattern.
         for stored in files:
             if mozpath.match(stored, pattern):
                 yield stored, DeflatedFile(files[stored])
     elif pattern == '':
         # Empty pattern selects every entry.
         for stored in files:
             yield stored, DeflatedFile(files[stored])
     elif pattern in files:
         # Exact known path.
         yield pattern, DeflatedFile(files[pattern])
     else:
         # Otherwise treat the pattern as a directory prefix.
         for stored in files:
             if mozpath.basedir(stored, [pattern]) == pattern:
                 yield stored, DeflatedFile(files[stored])
Пример #15
0
    def _find_dir(self, path):
        '''
        Actual implementation of FileFinder.find() when the given pattern
        corresponds to an existing directory under the base directory.
        Ignores file names starting with a '.' under the given path. If the
        path itself has leafs starting with a '.', they are not ignored.
        '''
        if any(mozpath.match(path, ignore_pattern)
               for ignore_pattern in self.ignore):
            return

        # The sorted makes the output idempotent. Otherwise, we are
        # likely dependent on filesystem implementation details, such as
        # inode ordering.
        for name in sorted(os.listdir(os.path.join(self.base, path))):
            if name.startswith('.'):
                continue
            for found_path, found_file in self._find(mozpath.join(path, name)):
                yield found_path, found_file
Пример #16
0
    def process_package_artifact(self, filename, processed_filename):
        """Repackage the downloaded package jar, rebasing matching entries
        from "firefox/..." onto the archive root.

        Raises ValueError when no entry matched any configured pattern.
        """
        added_entry = False
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for f in JarReader(filename):
                if not any(mozpath.match(f.filename, p) for p in self.package_artifact_patterns):
                    continue

                # strip off the relative "firefox/" bit from the path:
                basename = mozpath.relpath(f.filename, "firefox")
                self.log(logging.INFO, 'artifact',
                         {'basename': basename},
                         'Adding {basename} to processed archive')
                writer.add(basename.encode('utf-8'), f)
                added_entry = True

        if not added_entry:
            # Fixed: the fragments previously concatenated without a space
            # ('..."{patterns}"matched...').
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=self.artifact_patterns))
Пример #17
0
    def files_info(self, paths):
        """Obtain aggregate data from Files for a set of files.

        Given a set of input paths, determine which moz.build files may
        define metadata for them, evaluate those moz.build files, and
        apply file metadata rules defined within to determine metadata
        values for each file requested.

        Essentially, for each input path:

        1. Determine the set of moz.build files relevant to that file by
           looking for moz.build files in ancestor directories.
        2. Evaluate moz.build files starting with the most distant.
        3. Iterate over Files sub-contexts.
        4. If the file pattern matches the file we're seeking info on,
           apply attribute updates.
        5. Return the most recent value of attributes.
        """
        # Maps each requested path to the ordered list of contexts its
        # relevant moz.build files evaluated to.
        paths, _ = self.read_relevant_mozbuilds(paths)

        # Aggregated result: requested path -> accumulated Files flags.
        r = {}

        for path, ctxs in paths.items():
            # Start from an empty Files context; matching contexts are
            # layered on top in evaluation order (most distant first).
            flags = Files(Context())

            for ctx in ctxs:
                if not isinstance(ctx, Files):
                    continue

                relpath = mozpath.relpath(path, ctx.relsrcdir)
                pattern = ctx.pattern

                # Only do wildcard matching if the '*' character is present.
                # Otherwise, mozpath.match will match directories, which we've
                # arbitrarily chosen to not allow.
                if pattern == relpath or \
                        ('*' in pattern and mozpath.match(relpath, pattern)):
                    flags += ctx

            r[path] = flags

        return r
Пример #18
0
    def process_tests_artifact(self, filename, processed_filename):
        """Repackage a downloaded tests archive, rebasing entries matching
        ``self.test_artifact_patterns`` from their source prefix onto the
        corresponding destination prefix.

        Raises ValueError when nothing matched (archive layout changed).
        """
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            reader = JarReader(filename)
            # Use a distinct loop variable instead of shadowing the
            # `filename` parameter, and .items() (valid on Python 2 and 3)
            # instead of the Python-2-only .iteritems().
            for entry_name, entry in reader.entries.items():
                for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                    if not mozpath.match(entry_name, pattern):
                        continue
                    destpath = mozpath.relpath(entry_name, src_prefix)
                    destpath = mozpath.join(dest_prefix, destpath)
                    self.log(logging.INFO, "artifact", {"destpath": destpath}, "Adding {destpath} to processed archive")
                    # The upper 16 bits of external_attr hold the Unix mode.
                    mode = entry["external_attr"] >> 16
                    writer.add(destpath.encode("utf-8"), reader[entry_name], mode=mode)
                    added_entry = True

        if not added_entry:
            # Fixed: the fragments previously concatenated without a space
            # ('..."{patterns}"matched...').
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(patterns=LinuxArtifactJob.test_artifact_patterns)
            )
Пример #19
0
    def _find_helper(self, pattern, files, file_getter):
        """Generic implementation of _find.

        A few *Finder implementations share logic for returning results.
        This function implements the custom logic.

        The ``file_getter`` argument is a callable that receives a path
        that is known to exist. The callable should return a ``BaseFile``
        instance.
        """
        if '*' in pattern:
            # Wildcard pattern: test every known path against it.
            selected = (p for p in files if mozpath.match(p, pattern))
        elif pattern == '':
            # Empty pattern selects everything.
            selected = iter(files)
        elif pattern in files:
            # Exact match for a known path.
            selected = iter([pattern])
        else:
            # Otherwise treat the pattern as a directory prefix.
            selected = (p for p in files
                        if mozpath.basedir(p, [pattern]) == pattern)
        for p in selected:
            yield p, file_getter(p)
Пример #20
0
def main(args):
    """Command-line entry point: build a zip from the given inputs,
    honoring -C (working directory), -x (exclusion patterns) and --strip.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-C", metavar='DIR', default=".",
                        help="Change to given directory before considering "
                        "other paths")
    parser.add_argument("--strip", action='store_true',
                        help="Strip executables")
    parser.add_argument("-x", metavar='EXCLUDE', default=[], action='append',
                        help="Exclude files that match the pattern")
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+",
                        help="Path to files to add to zip")
    options = parser.parse_args(args)

    jarrer = Jarrer(optimize=False)

    with errors.accumulate():
        finder = FileFinder(options.C, find_executables=options.strip)
        for path in options.input:
            for p, f in finder.find(path):
                excluded = any(match(p, exclude) for exclude in options.x)
                if not excluded:
                    jarrer.add(p, f)
        jarrer.copy(mozpath.join(options.C, options.zip))
Пример #21
0
    def process_package_artifact(self, filename, processed_filename):
        """Copy matching files out of the package tarball into a flat
        processed jar keyed by basename.

        Raises ValueError when nothing matched any configured pattern.
        """
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for member in reader:
                    if not member.isfile():
                        continue

                    if not any(mozpath.match(member.name, pattern)
                               for pattern in self.package_artifact_patterns):
                        continue

                    basename = mozpath.basename(member.name)
                    self.log(logging.INFO, 'artifact',
                             {'basename': basename},
                             'Adding {basename} to processed archive')
                    writer.add(basename.encode('utf-8'),
                               reader.extractfile(member), mode=member.mode)
                    added_entry = True

        if not added_entry:
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=LinuxArtifactJob.package_artifact_patterns))
Пример #22
0
 def find(self, path):
     """Record the queried path, then yield every stored (name, file) pair
     whose name matches it, in sorted order."""
     self.log.append(path)
     matching = (name for name in sorted(self.files)
                 if mozpath.match(name, path))
     for name in matching:
         yield name, self.files[name]
Пример #23
0
    def resolve_tests(self, paths=None, flavor=None, subsuite=None, under_path=None,
                      tags=None):
        """Resolve tests from an identifier.

        This is a generator of dicts describing each test.

        ``paths`` can be an iterable of values to use to identify tests to run.
        If an entry is a known test file, tests associated with that file are
        returned (there may be multiple configurations for a single file). If
        an entry is a directory, or a prefix of a directory containing tests,
        all tests in that directory are returned. If the string appears in a
        known test file, that test file is considered. If the path contains
        a wildcard pattern, tests matching that pattern are returned.

        If ``under_path`` is a string, it will be used to filter out tests that
        aren't in the specified path prefix relative to topsrcdir or the
        test's installed dir.

        If ``flavor`` is a string, it will be used to filter returned tests
        to only be the flavor specified. A flavor is something like
        ``xpcshell``.

        If ``subsuite`` is a string, it will be used to filter returned tests
        to only be in the subsuite specified.

        If ``tags`` are specified, they will be used to filter returned tests
        to only those with a matching tag.
        """
        if tags:
            tags = set(tags)

        def fltr(tests):
            # Apply the flavor/subsuite/tags/under_path filters to each test.
            # (Indentation of the flavor check normalized; the original's
            # inconsistent indentation obscured which `if` the `continue`
            # belonged to.)
            for test in tests:
                if flavor:
                    # 'devtools' is special-cased to select browser-chrome
                    # tests; any other flavor must match exactly.
                    if (flavor == 'devtools' and test.get('flavor') != 'browser-chrome') or \
                       (flavor != 'devtools' and test.get('flavor') != flavor):
                        continue

                if subsuite and test.get('subsuite') != subsuite:
                    continue

                if tags and not (tags & set(test.get('tags', '').split())):
                    continue

                if under_path \
                        and not test['file_relpath'].startswith(under_path):
                    continue

                # Make a copy so modifications don't change the source.
                yield dict(test)

        paths = paths or []
        paths = [mozpath.normpath(p) for p in paths]
        if not paths:
            # No selectors means "everything".
            paths = [None]

        candidate_paths = set()

        for path in sorted(paths):
            if path is None:
                candidate_paths |= set(self._tests_by_path.keys())
                continue

            if '*' in path:
                candidate_paths |= {p for p in self._tests_by_path
                                    if mozpath.match(p, path)}
                continue

            # If the path is a directory, or the path is a prefix of a directory
            # containing tests, pull in all tests in that directory.
            if (path in self._test_dirs or
                any(p.startswith(path) for p in self._tests_by_path)):
                candidate_paths |= {p for p in self._tests_by_path
                                    if p.startswith(path)}
                continue

            # If it's a test file, add just that file.
            candidate_paths |= {p for p in self._tests_by_path if path in p}

        for p in sorted(candidate_paths):
            tests = self._tests_by_path[p]

            for test in fltr(tests):
                yield test
Пример #24
0
def find_files(archive):
    """Yield (path, BaseFile) pairs for every file belonging to ``archive``.

    For the 'common' archive, extra entries are synthesized so generated
    harness files are packaged too. Raises KeyError for an unknown archive
    name (via ARCHIVE_FILES lookup).
    """
    extra_entries = []
    generated_harness_files = find_generated_harness_files()

    if archive == 'common':
        # Construct entries ensuring all our generated harness files are
        # packaged in the common tests zip.
        packaged_paths = set()
        for entry in OBJDIR_TEST_FILES.values():
            # Fixed: work on a copy. The original mutated the shared
            # OBJDIR_TEST_FILES entries in place (del entry['pattern'],
            # entry['patterns'] = ...), corrupting them for any later call.
            entry = dict(entry)
            pat = mozpath.join(entry['base'], entry['pattern'])
            del entry['pattern']
            patterns = []
            for path in generated_harness_files:
                if mozpath.match(path, pat):
                    patterns.append(path[len(entry['base']) + 1:])
                    packaged_paths.add(path)
            if patterns:
                entry['patterns'] = patterns
                extra_entries.append(entry)
        entry = {
            'source': buildconfig.topobjdir,
            'base': '_tests',
            'patterns': [],
        }
        for path in set(generated_harness_files) - packaged_paths:
            entry['patterns'].append(path[len('_tests') + 1:])
        extra_entries.append(entry)

    for entry in ARCHIVE_FILES[archive] + extra_entries:
        source = entry['source']
        dest = entry.get('dest')
        base = entry.get('base', '')

        # Fixed: collect patterns into a fresh list so the list stored in
        # ARCHIVE_FILES is never mutated by the appends below.
        patterns = list(entry.get('patterns', []))
        pattern = entry.get('pattern')
        if pattern:
            patterns.append(pattern)

        manifests = list(entry.get('manifests', []))
        manifest = entry.get('manifest')
        if manifest:
            manifests.append(manifest)
        if manifests:
            dirs = find_manifest_dirs(buildconfig.topsrcdir, manifests)
            patterns.extend({'{}/**'.format(d) for d in dirs})

        ignore = list(entry.get('ignore', []))
        ignore.extend([
            '**/.flake8',
            '**/.mkdir.done',
            '**/*.pyc',
        ])

        if archive != 'common' and base.startswith('_tests'):
            # We may have generated_harness_files to exclude from this entry.
            for path in generated_harness_files:
                if path.startswith(base):
                    ignore.append(path[len(base) + 1:])

        common_kwargs = {
            'find_dotfiles': True,
            'ignore': ignore,
        }

        finder = FileFinder(os.path.join(source, base), **common_kwargs)

        for pattern in patterns:
            for p, f in finder.find(pattern):
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
Пример #25
0
 def test_match(self):
     """match() handles base-directory prefixes, '*' and '**' globs."""
     # Table of (path, pattern, expected) in the original assertion order,
     # duplicates included.
     cases = [
         ("foo", "", True),
         ("foo/bar/baz.qux", "foo/bar", True),
         ("foo/bar/baz.qux", "foo", True),
         ("foo", "*", True),
         ("foo/bar/baz.qux", "foo/bar/*", True),
         ("foo/bar/baz.qux", "foo/bar/*", True),
         ("foo/bar/baz.qux", "foo/bar/*", True),
         ("foo/bar/baz.qux", "foo/bar/*", True),
         ("foo/bar/baz.qux", "foo/*/baz.qux", True),
         ("foo/bar/baz.qux", "*/bar/baz.qux", True),
         ("foo/bar/baz.qux", "*/*/baz.qux", True),
         ("foo/bar/baz.qux", "*/*/*", True),
         ("foo/bar/baz.qux", "foo/*/*", True),
         ("foo/bar/baz.qux", "foo/*/*.qux", True),
         ("foo/bar/baz.qux", "foo/b*/*z.qux", True),
         ("foo/bar/baz.qux", "foo/b*r/ba*z.qux", True),
         ("foo/bar/baz.qux", "foo/b*z/ba*r.qux", False),
         ("foo/bar/baz.qux", "**", True),
         ("foo/bar/baz.qux", "**/baz.qux", True),
         ("foo/bar/baz.qux", "**/bar/baz.qux", True),
         ("foo/bar/baz.qux", "foo/**/baz.qux", True),
         ("foo/bar/baz.qux", "foo/**/*.qux", True),
         ("foo/bar/baz.qux", "**/foo/bar/baz.qux", True),
         ("foo/bar/baz.qux", "foo/**/bar/baz.qux", True),
         ("foo/bar/baz.qux", "foo/**/bar/*.qux", True),
         ("foo/bar/baz.qux", "foo/**/*.qux", True),
         ("foo/bar/baz.qux", "**/*.qux", True),
         ("foo/bar/baz.qux", "**.qux", False),
         ("foo/bar", "foo/*/bar", False),
         ("foo/bar/baz.qux", "foo/**/bar/**", True),
         ("foo/nobar/baz.qux", "foo/**/bar/**", False),
         ("foo/bar", "foo/**/bar/**", True),
     ]
     for path, pattern, expected in cases:
         if expected:
             self.assertTrue(match(path, pattern))
         else:
             self.assertFalse(match(path, pattern))
Пример #26
0
 def add(self, path, content):
     """Forward to the wrapped formatter unless ``path`` matches one of the
     configured patterns, in which case report an error instead."""
     hits_pattern = any(mozpath.match(path, spec) for spec in self._files)
     if hits_pattern:
         self._error(self._msg % path)
     else:
         self._formatter.add(path, content)
Пример #27
0
    def _process_final_target_files(self, obj):
        """Emit tup symlink rules installing final target files.

        Validates the install target, then walks obj.files: source-dir and
        absolute files get symlink rules in the destination's backend file;
        objdir-generated files get rules in the generating directory's
        backend file.
        """
        target = obj.install_target
        if not isinstance(obj, ObjdirFiles):
            # Installation is only supported into these well-known roots.
            path = mozpath.basedir(target, (
                'dist/bin',
                'dist/xpi-stage',
                '_tests',
                'dist/include',
                'dist/sdk',
            ))
            if not path:
                raise Exception("Cannot install to " + target)

        if target.startswith('_tests'):
            # TODO: TEST_HARNESS_FILES present a few challenges for the tup
            # backend (bug 1372381).
            return

        for path, files in obj.files.walk():
            for f in files:
                if not isinstance(f, ObjDirPath):
                    # Source-dir or absolute file: the rule lives in the
                    # destination directory's backend file.
                    backend_file = self._get_backend_file(mozpath.join(target, path))
                    if '*' in f:
                        # Wildcard install: only the filename component may
                        # carry the wildcard for absolute/srcdir-rooted paths.
                        if f.startswith('/') or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if '*' in basepath:
                                raise Exception("Wildcards are only supported in the filename part of "
                                                "srcdir-relative or absolute paths.")

                            # TODO: This is only needed for Windows, so we can
                            # skip this for now.
                            pass
                        else:
                            def _prefix(s):
                                # Yield the leading wildcard-free path
                                # components, each with a trailing '/'.
                                for p in mozpath.split(s):
                                    if '*' not in p:
                                        yield p + '/'
                            prefix = ''.join(_prefix(f.full_path))
                            self.backend_input_files.add(prefix)
                            # Expand the wildcard against the filesystem and
                            # emit one symlink rule per matching file.
                            finder = FileFinder(prefix)
                            for p, _ in finder.find(f.full_path[len(prefix):]):
                                backend_file.symlink_rule(mozpath.join(prefix, p),
                                                          output=mozpath.join(f.target_basename, p),
                                                          output_group=self._installed_files)
                    else:
                        backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=self._installed_files)
                else:
                    if (self.environment.is_artifact_build and
                        any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
                        # If we have an artifact build we never would have generated this file,
                        # so do not attempt to install it.
                        continue

                    # We're not generating files in these directories yet, so
                    # don't attempt to install files generated from them.
                    if f.context.relobjdir not in ('layout/style/test',
                                                   'toolkit/library'):
                        output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                              f.target_basename)
                        # The rule is emitted in the directory that generates
                        # the file, not the install destination.
                        gen_backend_file = self._get_backend_file(f.context.relobjdir)
                        gen_backend_file.symlink_rule(f.full_path, output=output,
                                                      output_group=self._installed_files)
Пример #28
0
 def match(self, patterns):
     """Return True if self.path matches any of the given patterns' paths."""
     for pattern in patterns:
         if mozpath.match(self.path, pattern.path):
             return True
     return False
Пример #29
0
    def consume_object(self, obj):
        """Write out build files necessary to build with tup.

        Dispatches on the concrete type of *obj* (a ContextDerived build
        object) and records the relevant information in the appropriate
        backend file.  Returns True when the object was handled here (or
        by the common backend), False when it was not consumed.
        """

        if not isinstance(obj, ContextDerived):
            return False

        # Give the common backend first crack at the object.
        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            skip_files = []

            if self.environment.is_artifact_build:
                # Artifact builds never run the compile environment, so
                # anything it would generate must not be emitted.
                skip_files = self._compile_env_gen

            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    # Report the object as unconsumed: none of its
                    # outputs should be generated in this configuration.
                    return False

            if backend_file.requires_delay(obj.inputs):
                # Inputs are not available yet; emit this rule later.
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry) and
              obj.install_target.startswith(('dist/bin', 'dist/xpi-stage'))):
            # The quitter extension specifies its chrome.manifest as a
            # FINAL_TARGET_FILE, which conflicts with the manifest generation
            # we do here, so skip it for now.
            if obj.install_target != 'dist/xpi-stage/quitter':
                top_level = mozpath.join(obj.install_target, 'chrome.manifest')
                if obj.path != top_level:
                    # Reference nested manifests from the top-level one.
                    entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                            obj.install_target)
                    self._manifest_entries[top_level].add(entry)
                self._manifest_entries[obj.path].add(str(obj.entry))
        elif isinstance(obj, Defines):
            self._process_defines(backend_file, obj)
        elif isinstance(obj, HostDefines):
            self._process_defines(backend_file, obj, host=True)
        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj)
        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(obj, backend_file)
        elif isinstance(obj, JARManifest):
            self._consume_jar_manifest(obj)
        elif isinstance(obj, PerSourceFlag):
            backend_file.per_source_flags[obj.file_name].extend(obj.flags)
        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)
        elif isinstance(obj, (Sources, GeneratedSources)):
            backend_file.sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, HostSources):
            backend_file.host_sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, VariablePassthru):
            backend_file.variables = obj.variables
        elif isinstance(obj, RustLibrary):
            self._gen_rust_rules(obj, backend_file)
        elif isinstance(obj, StaticLibrary):
            backend_file.static_lib = obj
        elif isinstance(obj, SharedLibrary):
            backend_file.shared_lib = obj
        elif isinstance(obj, (HostProgram, HostSimpleProgram)):
            backend_file.host_programs.append(obj)
        elif isinstance(obj, HostLibrary):
            backend_file.host_library = obj
        elif isinstance(obj, (Program, SimpleProgram)):
            backend_file.programs.append(obj)
        elif isinstance(obj, DirectoryTraversal):
            pass

        # Unrecognized types are still reported as consumed here.
        return True
Пример #30
0
    def _process_final_target_files(self, obj):
        """Emit symlink rules installing *obj*'s files to their targets.

        Walks the ``files`` hierarchy of a final-target files object and
        creates one symlink rule per file (expanding wildcard patterns
        for srcdir-relative paths).  Raises if the install target is not
        under one of the recognized distribution directories.
        """
        target = obj.install_target
        if not isinstance(obj, ObjdirFiles):
            # Installs must land under one of these well-known prefixes.
            path = mozpath.basedir(target, (
                'dist/bin',
                'dist/xpi-stage',
                '_tests',
                'dist/include',
                'dist/sdk',
            ))
            if not path:
                raise Exception("Cannot install to " + target)

        if target.startswith('_tests'):
            # TODO: TEST_HARNESS_FILES present a few challenges for the tup
            # backend (bug 1372381).
            return

        for path, files in obj.files.walk():
            for f in files:
                if not isinstance(f, ObjDirPath):
                    # Source-directory file, possibly a wildcard pattern.
                    backend_file = self._get_backend_file(
                        mozpath.join(target, path))
                    if '*' in f:
                        if f.startswith('/') or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if '*' in basepath:
                                raise Exception(
                                    "Wildcards are only supported in the filename part of "
                                    "srcdir-relative or absolute paths.")

                            # TODO: This is only needed for Windows, so we can
                            # skip this for now.
                            pass
                        else:

                            def _prefix(s):
                                # Yield leading path components up to (not
                                # including) the first wildcard component.
                                for p in mozpath.split(s):
                                    if '*' not in p:
                                        yield p + '/'

                            prefix = ''.join(_prefix(f.full_path))
                            self.backend_input_files.add(prefix)
                            finder = FileFinder(prefix)
                            # Expand the wildcard; one rule per match.
                            for p, _ in finder.find(f.full_path[len(prefix):]):
                                backend_file.symlink_rule(
                                    mozpath.join(prefix, p),
                                    output=mozpath.join(f.target_basename, p),
                                    output_group=self._installed_files)
                    else:
                        backend_file.symlink_rule(
                            f.full_path,
                            output=f.target_basename,
                            output_group=self._installed_files)
                else:
                    if (self.environment.is_artifact_build and any(
                            mozpath.match(f.target_basename, p)
                            for p in self._compile_env_gen_files)):
                        # If we have an artifact build we never would have generated this file,
                        # so do not attempt to install it.
                        continue

                    # We're not generating files in these directories yet, so
                    # don't attempt to install files generated from them.
                    if f.context.relobjdir not in ('layout/style/test',
                                                   'toolkit/library',
                                                   'js/src/shell'):
                        output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                              f.target_basename)
                        # The rule lives in the backend file of the objdir
                        # that generates the file, not the install dir.
                        gen_backend_file = self._get_backend_file(
                            f.context.relobjdir)
                        gen_backend_file.symlink_rule(
                            f.full_path,
                            output=output,
                            output_group=self._installed_files)
Пример #31
0
    def _process_final_target_files(self, obj):
        """Emit symlink rules installing *obj*'s files to their targets.

        Walks the ``files`` hierarchy of a final-target files object and
        creates one symlink rule per file, placing compile-environment
        outputs into the installed-files output group.  Raises if the
        install target is not under a recognized distribution directory.
        """
        target = obj.install_target
        if not isinstance(obj, ObjdirFiles):
            # Installs must land under one of these well-known prefixes.
            path = mozpath.basedir(target, (
                'dist/bin',
                'dist/xpi-stage',
                '_tests',
                'dist/include',
                'dist/sdk',
            ))
            if not path:
                raise Exception("Cannot install to " + target)

        for path, files in obj.files.walk():
            self._add_features(target, path)
            for f in files:
                output_group = None
                # Compile-environment products join the installed-files
                # group so dependent rules are sequenced after them.
                if any(mozpath.match(mozpath.basename(f), p)
                       for p in self._compile_env_files):
                    output_group = self._installed_files

                if not isinstance(f, ObjDirPath):
                    # Source-directory file, possibly a wildcard pattern.
                    backend_file = self._get_backend_file(mozpath.join(target, path))
                    if '*' in f:
                        if f.startswith('/') or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if '*' in basepath:
                                raise Exception("Wildcards are only supported in the filename part of "
                                                "srcdir-relative or absolute paths.")

                            # TODO: This is only needed for Windows, so we can
                            # skip this for now.
                            pass
                        else:
                            def _prefix(s):
                                # Yield leading path components up to (not
                                # including) the first wildcard component.
                                for p in mozpath.split(s):
                                    if '*' not in p:
                                        yield p + '/'
                            prefix = ''.join(_prefix(f.full_path))
                            self.backend_input_files.add(prefix)

                            output_dir = ''
                            # If we have a RenamedSourcePath here, the common backend
                            # has generated this object from a jar manifest, and we
                            # can rely on 'path' to be our destination path relative
                            # to any wildcard match. Otherwise, the output file may
                            # contribute to our destination directory.
                            if not isinstance(f, RenamedSourcePath):
                                output_dir = ''.join(_prefix(mozpath.dirname(f)))

                            finder = FileFinder(prefix)
                            for p, _ in finder.find(f.full_path[len(prefix):]):
                                # NOTE(review): install_dir appears unused here;
                                # confirm before removing.
                                install_dir = prefix[len(obj.srcdir) + 1:]
                                output = p
                                if f.target_basename and '*' not in f.target_basename:
                                    output = mozpath.join(f.target_basename, output)
                                backend_file.symlink_rule(mozpath.join(prefix, p),
                                                          output=mozpath.join(output_dir, output),
                                                          output_group=output_group)
                    else:
                        backend_file.symlink_rule(f.full_path, output=f.target_basename, output_group=output_group)
                else:
                    if (self.environment.is_artifact_build and
                        any(mozpath.match(f.target_basename, p) for p in self._compile_env_gen_files)):
                        # If we have an artifact build we never would have generated this file,
                        # so do not attempt to install it.
                        continue

                    output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                          f.target_basename)
                    # The rule lives in the backend file of the objdir that
                    # generates the file, not the install dir.
                    gen_backend_file = self._get_backend_file(f.context.relobjdir)
                    if gen_backend_file.requires_delay([f]):
                        # Generating rule not emitted yet; install later.
                        gen_backend_file.delayed_installed_files.append((f.full_path, output, output_group))
                    else:
                        gen_backend_file.symlink_rule(f.full_path, output=output,
                                                      output_group=output_group)
Пример #32
0
    def consume_object(self, obj):
        """Write out build files necessary to build with tup.

        Dispatches on the concrete type of *obj* (a ContextDerived build
        object) and records the relevant information in the appropriate
        backend file.  Returns True when the object was handled here (or
        by the common backend), False when it was not consumed.
        """

        if not isinstance(obj, ContextDerived):
            return False

        # Give the common backend first crack at the object.
        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            # These files are already generated by make before tup runs.
            skip_files = (
                'buildid.h',
                'source-repo.h',
            )

            if self.environment.is_artifact_build:
                # Artifact builds also skip anything the compile
                # environment would have generated.
                skip_files = skip_files + self._compile_env_gen_files

            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    # Report the object as unconsumed; none of its
                    # outputs should be generated here.
                    return False

            if 'application.ini.h' in obj.outputs:
                # application.ini.h is a special case since we need to process
                # the FINAL_TARGET_PP_FILES for application.ini before running
                # the GENERATED_FILES script, and tup doesn't handle the rules
                # out of order.
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry)
              and obj.install_target.startswith('dist/bin')):
            top_level = mozpath.join(obj.install_target, 'chrome.manifest')
            if obj.path != top_level:
                # Reference nested manifests from the top-level one.
                entry = 'manifest %s' % mozpath.relpath(
                    obj.path, obj.install_target)
                self._manifest_entries[top_level].add(entry)
            self._manifest_entries[obj.path].add(str(obj.entry))
        elif isinstance(obj, Defines):
            self._process_defines(backend_file, obj)
        elif isinstance(obj, HostDefines):
            self._process_defines(backend_file, obj, host=True)
        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj)
        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(obj, backend_file)
        elif isinstance(obj, JARManifest):
            self._consume_jar_manifest(obj)
        elif isinstance(obj, PerSourceFlag):
            backend_file.per_source_flags[obj.file_name].extend(obj.flags)
        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)
        elif isinstance(obj, (Sources, GeneratedSources)):
            backend_file.sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, HostSources):
            backend_file.host_sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, VariablePassthru):
            backend_file.variables = obj.variables

        # Unrecognized types are still reported as consumed here.
        return True
Пример #33
0
 def test_match(self):
     """Exercise mozpath.match with prefix, '*' and '**' patterns."""
     matching = [
         ('foo', ''),
         ('foo/bar/baz.qux', 'foo/bar'),
         ('foo/bar/baz.qux', 'foo'),
         ('foo', '*'),
         ('foo/bar/baz.qux', 'foo/bar/*'),
         ('foo/bar/baz.qux', 'foo/bar/*'),
         ('foo/bar/baz.qux', 'foo/bar/*'),
         ('foo/bar/baz.qux', 'foo/bar/*'),
         ('foo/bar/baz.qux', 'foo/*/baz.qux'),
         ('foo/bar/baz.qux', '*/bar/baz.qux'),
         ('foo/bar/baz.qux', '*/*/baz.qux'),
         ('foo/bar/baz.qux', '*/*/*'),
         ('foo/bar/baz.qux', 'foo/*/*'),
         ('foo/bar/baz.qux', 'foo/*/*.qux'),
         ('foo/bar/baz.qux', 'foo/b*/*z.qux'),
         ('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'),
         ('foo/bar/baz.qux', '**'),
         ('foo/bar/baz.qux', '**/baz.qux'),
         ('foo/bar/baz.qux', '**/bar/baz.qux'),
         ('foo/bar/baz.qux', 'foo/**/baz.qux'),
         ('foo/bar/baz.qux', 'foo/**/*.qux'),
         ('foo/bar/baz.qux', '**/foo/bar/baz.qux'),
         ('foo/bar/baz.qux', 'foo/**/bar/baz.qux'),
         ('foo/bar/baz.qux', 'foo/**/bar/*.qux'),
         ('foo/bar/baz.qux', 'foo/**/*.qux'),
         ('foo/bar/baz.qux', '**/*.qux'),
     ]
     non_matching = [
         ('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'),
         ('foo/bar/baz.qux', '**.qux'),
         ('foo/bar', 'foo/*/bar'),
     ]
     for path, pattern in matching:
         self.assertTrue(match(path, pattern))
     for path, pattern in non_matching:
         self.assertFalse(match(path, pattern))
Пример #34
0
def find_files(archive):
    """Yield (path, file) pairs for every file belonging to *archive*.

    For the "common" archive, extra entries are synthesized so that all
    generated test-harness files are packaged.  Entries come from
    ARCHIVE_FILES plus those synthesized entries; each entry's patterns,
    manifests and ignore list are expanded via a FileFinder rooted at
    the entry's source/base directory.

    Yields:
        (destination_path, file_object) tuples.
    """
    extra_entries = []
    generated_harness_files = find_generated_harness_files()

    if archive == "common":
        # Construct entries ensuring all our generated harness files are
        # packaged in the common tests archive.
        packaged_paths = set()
        for entry in OBJDIR_TEST_FILES.values():
            # Work on a copy: the original code mutated the shared
            # OBJDIR_TEST_FILES dicts (del/assign below), which corrupted
            # them for any subsequent call.
            entry = dict(entry)
            pat = mozpath.join(entry["base"], entry["pattern"])
            del entry["pattern"]
            patterns = []
            for path in generated_harness_files:
                if mozpath.match(path, pat):
                    patterns.append(path[len(entry["base"]) + 1:])
                    packaged_paths.add(path)
            if patterns:
                entry["patterns"] = patterns
                extra_entries.append(entry)
        # Any generated harness file not matched above is packaged
        # relative to _tests in the objdir.
        entry = {
            "source": buildconfig.topobjdir,
            "base": "_tests",
            "patterns": [],
        }
        for path in set(generated_harness_files) - packaged_paths:
            entry["patterns"].append(path[len("_tests") + 1:])
        extra_entries.append(entry)

    for entry in ARCHIVE_FILES[archive] + extra_entries:
        source = entry["source"]
        dest = entry.get("dest")
        base = entry.get("base", "")

        pattern = entry.get("pattern")
        # Copy before appending so lists stored in the shared
        # ARCHIVE_FILES definition are not mutated across calls
        # (matches the existing copy idiom used for "ignore" below).
        patterns = list(entry.get("patterns", []))
        if pattern:
            patterns.append(pattern)

        manifest = entry.get("manifest")
        manifests = list(entry.get("manifests", []))
        if manifest:
            manifests.append(manifest)
        if manifests:
            # Package everything under directories referenced by manifests.
            dirs = find_manifest_dirs(os.path.join(source, base), manifests)
            patterns.extend({"{}/**".format(d) for d in dirs})

        ignore = list(entry.get("ignore", []))
        ignore.extend([
            "**/.flake8",
            "**/.mkdir.done",
            "**/*.pyc",
        ])

        if archive not in ("common",
                           "updater-dep") and base.startswith("_tests"):
            # We may have generated_harness_files to exclude from this entry.
            for path in generated_harness_files:
                if path.startswith(base):
                    ignore.append(path[len(base) + 1:])

        common_kwargs = {
            "find_dotfiles": True,
            "ignore": ignore,
        }

        finder = FileFinder(os.path.join(source, base), **common_kwargs)

        for pattern in patterns:
            for p, f in finder.find(pattern):
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
Пример #35
0
    def consume_object(self, obj):
        """Write out build files necessary to build with tup.

        Dispatches on the concrete type of *obj* (a ContextDerived build
        object) and records the relevant information in the appropriate
        backend file.  Returns True when the object was handled, False
        when it was not consumed or needs further processing elsewhere.
        """

        if not isinstance(obj, ContextDerived):
            return False

        # Give the common backend first crack at the object.
        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            skip_files = []

            if self.environment.is_artifact_build:
                # Artifact builds never run the compile environment, so
                # anything it would generate must not be emitted.
                skip_files = self._compile_env_gen_files

            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    # Report the object as unconsumed; none of its
                    # outputs should be generated here.
                    return False

            if backend_file.requires_delay(obj.inputs):
                # Inputs are not available yet; emit this rule later.
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry)
              and obj.install_target.startswith('dist/bin')):
            top_level = mozpath.join(obj.install_target, 'chrome.manifest')
            if obj.path != top_level:
                # Reference nested manifests from the top-level one.
                entry = 'manifest %s' % mozpath.relpath(
                    obj.path, obj.install_target)
                self._manifest_entries[top_level].add(entry)
            self._manifest_entries[obj.path].add(str(obj.entry))
        elif isinstance(obj, Defines):
            self._process_defines(backend_file, obj)
        elif isinstance(obj, HostDefines):
            self._process_defines(backend_file, obj, host=True)
        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj)
        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(obj, backend_file)
        elif isinstance(obj, JARManifest):
            self._consume_jar_manifest(obj)
        elif isinstance(obj, PerSourceFlag):
            backend_file.per_source_flags[obj.file_name].extend(obj.flags)
        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)
        elif isinstance(obj, (Sources, GeneratedSources)):
            backend_file.sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, HostSources):
            backend_file.host_sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, VariablePassthru):
            backend_file.variables = obj.variables
        elif isinstance(obj, StaticLibrary):
            backend_file.static_lib = obj
        elif isinstance(obj, SharedLibrary):
            backend_file.shared_lib = obj
        elif isinstance(obj, HostProgram):
            pass
        elif isinstance(obj, Program):
            backend_file.program = obj

        # The top-level Makefile.in still contains our driver target and some
        # things related to artifact builds, so as a special case ensure the
        # make backend generates a Makefile there.
        if obj.objdir == self.environment.topobjdir:
            return False

        return True
Пример #36
0
def package_fennec_apk(inputs=None, omni_ja=None, classes_dex=None,
                       lib_dirs=None,
                       assets_dirs=None,
                       features_dirs=None,
                       root_files=None,
                       verbose=False):
    """Assemble a Fennec APK from the given pieces and return the Jarrer.

    Args:
        inputs: existing APK/zip files whose contents seed the output;
            later inputs overwrite earlier ones.
        omni_ja: path to omni.ja to store (uncompressed) under assets/.
        classes_dex: path to a classes.dex to add at the APK root.
        lib_dirs: directories whose contents go under lib/.
        assets_dirs: directories whose contents go under assets/ (with
            special xz handling for .so files).
        features_dirs: directories whose contents go under
            assets/features/.
        root_files: individual files to place at the APK root.
        verbose: when True, print each packaged path.

    Returns:
        A Jarrer containing the assembled APK contents.

    Note: the original signature used mutable default arguments
    (``inputs=[]`` etc.); they are replaced with ``None`` sentinels to
    avoid the shared-mutable-default pitfall.  Behavior is unchanged.
    """
    inputs = [] if inputs is None else inputs
    lib_dirs = [] if lib_dirs is None else lib_dirs
    assets_dirs = [] if assets_dirs is None else assets_dirs
    features_dirs = [] if features_dirs is None else features_dirs
    root_files = [] if root_files is None else root_files

    jarrer = Jarrer(optimize=False)

    # First, take input files.  The contents of the later files overwrites the
    # content of earlier files.  Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input.  This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename

            if mozpath.match(path, '/classes*.dex'):
                # Defer DEX files; only the last input providing any will
                # contribute them (see below).
                last_input_with_dex_files = input
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename

            if not mozpath.match(path, '/classes*.dex'):
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Add *file* at *path*, replacing any existing entry; raises if
        # the backing file does not exist on disk.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' % \
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if f.open().read(5)[1:] == '7zXZ':
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed libraries
                    # (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [buildconfig.substs.get('XZ'), '-zkf',
                           mozpath.join(finder.base, p)]

                    # For now, the mozglue XZStream ELF loader can only support xz files
                    # with a single stream that contains a single block. In xz, there is no
                    # explicit option to set the max block count. Instead, we force xz to use
                    # single thread mode, which results in a single block.
                    cmd.extend(['--threads=1'])

                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'

                    if bcj:
                        cmd.extend([bcj])
                    # We need to explicitly specify the LZMA filter chain to ensure consistent builds
                    # across platforms. Note that the dict size must be less then 16MiB per the hardcoded
                    # value in mozglue/linker/XZStream.cpp. This is the default LZMA filter chain for for
                    # xz-utils version 5.0. See:
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/lzma/lzma_encoder_presets.c
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/api/lzma/container.h#L31
                    cmd.extend(['--lzma2=dict=8MiB,lc=3,lp=0,pb=2,mode=normal,nice=64,mf=bt4,depth=0'])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    os.rename(f.path + '.xz', f.path)
                    compress = False

            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)

    if classes_dex:
        if buildconfig.substs.get('MOZ_BUILD_MOBILE_ANDROID_WITH_GRADLE'):
            raise ValueError("Fennec APKs built --with-gradle "
                             "should never specify classes.dex")

        add('classes.dex', File(classes_dex))

    return jarrer
Пример #37
0
    def consume_object(self, obj):
        """Write out build files necessary to build with tup.

        Dispatches on the concrete type of *obj* (a ContextDerived build
        object) and records the relevant information in the appropriate
        backend file.  Returns True when the object was handled here (or
        by the common backend), False when it was not consumed.
        """

        if not isinstance(obj, ContextDerived):
            return False

        # Give the common backend first crack at the object.
        consumed = CommonBackend.consume_object(self, obj)
        if consumed:
            return True

        backend_file = self._get_backend_file_for(obj)

        if isinstance(obj, GeneratedFile):
            # These files are already generated by make before tup runs.
            skip_files = (
                'buildid.h',
                'source-repo.h',
            )

            if self.environment.is_artifact_build:
                # Artifact builds also skip anything the compile
                # environment would have generated.
                skip_files = skip_files + self._compile_env_gen_files

            for f in obj.outputs:
                if any(mozpath.match(f, p) for p in skip_files):
                    # Report the object as unconsumed; none of its
                    # outputs should be generated here.
                    return False

            if 'application.ini.h' in obj.outputs:
                # application.ini.h is a special case since we need to process
                # the FINAL_TARGET_PP_FILES for application.ini before running
                # the GENERATED_FILES script, and tup doesn't handle the rules
                # out of order.
                backend_file.delayed_generated_files.append(obj)
            else:
                self._process_generated_file(backend_file, obj)
        elif (isinstance(obj, ChromeManifestEntry) and
              obj.install_target.startswith('dist/bin')):
            top_level = mozpath.join(obj.install_target, 'chrome.manifest')
            if obj.path != top_level:
                # Reference nested manifests from the top-level one.
                entry = 'manifest %s' % mozpath.relpath(obj.path,
                                                        obj.install_target)
                self._manifest_entries[top_level].add(entry)
            self._manifest_entries[obj.path].add(str(obj.entry))
        elif isinstance(obj, Defines):
            self._process_defines(backend_file, obj)
        elif isinstance(obj, HostDefines):
            self._process_defines(backend_file, obj, host=True)
        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj)
        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(obj, backend_file)
        elif isinstance(obj, JARManifest):
            self._consume_jar_manifest(obj)
        elif isinstance(obj, PerSourceFlag):
            backend_file.per_source_flags[obj.file_name].extend(obj.flags)
        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)
        elif isinstance(obj, (Sources, GeneratedSources)):
            backend_file.sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, HostSources):
            backend_file.host_sources[obj.canonical_suffix].extend(obj.files)
        elif isinstance(obj, VariablePassthru):
            backend_file.variables = obj.variables

        # Unrecognized types are still reported as consumed here.
        return True
Пример #38
0
 def find(self, path):
     """Record *path* in the query log, then yield ``(name, file)``
     pairs for every known file whose name matches it, in sorted order.
     """
     self.log.append(path)
     for name in sorted(self.files):
         if not mozpath.match(name, path):
             continue
         yield name, self.files[name]
Пример #39
0
def fat_aar(distdir,
            aars_paths,
            no_process=False,
            no_compatibility_check=False):
    """Combine architecture-specific AARs into a single "fat" AAR layout.

    ``aars_paths`` maps an architecture name (e.g. ``armeabi-v7a``) to the
    path of that architecture's AAR.  Native libraries and a small set of
    per-architecture preference files are copied through as-is; everything
    else is fingerprinted (SHA-1) and must agree across all input
    architectures unless it matches an allow-list pattern.

    Returns 0 on success, 1 when disallowed differences or missing
    per-architecture inputs are found (unless ``no_compatibility_check``).
    """
    if no_process:
        print("Not processing architecture-specific artifact Maven AARs.")
        return 0

    # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
    diffs = defaultdict(lambda: defaultdict(list))
    missing_arch_prefs = set()
    # Collect multi-architecture inputs to the fat AAR.
    copier = FileCopier()

    for arch, aar_path in aars_paths.items():
        # Map old non-architecture-specific path to new architecture-specific path.
        old_rewrite_map = {
            "greprefs.js":
            "{}/greprefs.js".format(arch),
            "defaults/pref/geckoview-prefs.js":
            "defaults/pref/{}/geckoview-prefs.js".format(arch),
        }

        # Architecture-specific preferences files.
        arch_prefs = set(old_rewrite_map.values())
        # Expect every arch-specific pref path to appear in some input;
        # entries are discarded below as they are encountered.
        missing_arch_prefs |= set(arch_prefs)

        jar_finder = JarFinder(aar_path, JarReader(aar_path))
        for path, fileobj in UnpackFinder(jar_finder):
            # Native libraries go straight through.
            if mozpath.match(path, "jni/**"):
                copier.add(path, fileobj)

            elif path in arch_prefs:
                # Per-architecture preference files also pass through.
                copier.add(path, fileobj)

            elif path in ("classes.jar", "annotations.zip"):
                # annotations.zip differs due to timestamps, but the contents should not.

                # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
                # and it's not worth working around, so we use Python's zip functionality
                # instead.
                z = ZipFile(BytesIO(fileobj.open().read()))
                for r in z.namelist():
                    # Fingerprint each archive member individually so
                    # differences are reported at `archive!/member`
                    # granularity rather than for the whole archive.
                    fingerprint = sha1(z.open(r).read()).hexdigest()
                    diffs["{}!/{}".format(path, r)][fingerprint].append(arch)

            else:
                fingerprint = sha1(six.ensure_binary(
                    fileobj.open().read())).hexdigest()
                # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
                # since in practice they will never overlap.
                diffs[path][fingerprint].append(arch)

            missing_arch_prefs.discard(path)

    # Some differences are allowed across the architecture-specific AARs.  We could allow-list
    # the actual content, but it's not necessary right now.
    allow_pattern_list = {
        "AndroidManifest.xml",  # Min SDK version is different for 32- and 64-bit builds.
        "classes.jar!/org/mozilla/gecko/util/HardwareUtils.class",  # Min SDK as well.
        "classes.jar!/org/mozilla/geckoview/BuildConfig.class",
        # Each input captures its CPU architecture.
        "chrome/toolkit/content/global/buildconfig.html",
        # Bug 1556162: localized resources are not deterministic across
        # per-architecture builds triggered from the same push.
        "**/*.ftl",
        "**/*.dtd",
        "**/*.properties",
    }

    not_allowed = OrderedDict()

    def format_diffs(ds):
        # Like '  armeabi-v7a, arm64-v8a -> XXX\n  x86, x86_64 -> YYY'.
        return "\n".join(
            sorted("  {archs} -> {fingerprint}".format(
                archs=", ".join(sorted(archs)), fingerprint=fingerprint)
                   for fingerprint, archs in ds.items()))

    for p, ds in sorted(diffs.items()):
        if len(ds) <= 1:
            # Only one hash across all inputs: roll on.
            continue

        if any(mozpath.match(p, pat) for pat in allow_pattern_list):
            print(
                'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
                .format(path=p, ds_repr=format_diffs(ds)))
            continue

        not_allowed[p] = ds

    for p, ds in not_allowed.items():
        print(
            'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
            .format(path=p, ds_repr=format_diffs(ds)))

    for missing in sorted(missing_arch_prefs):
        print(
            "Disallowed: Inputs missing expected architecture-specific input: {missing}"
            .format(missing=missing))

    if not no_compatibility_check and (missing_arch_prefs or not_allowed):
        return 1

    # All clear: materialize the collected files under dist/output.
    output_dir = mozpath.join(distdir, "output")
    copier.copy(output_dir)

    return 0
Пример #40
0
    def _resolve(self,
                 paths=None,
                 flavor=None,
                 subsuite=None,
                 under_path=None,
                 tags=None):
        """Yield a copy of each known test dict matching the selectors.

        ``paths`` entries may be test files, directories (or prefixes of
        directories containing tests), wildcard patterns, or manifest
        files (``.ini``/``.list``).  The candidate set is then filtered
        by ``flavor``, ``subsuite``, ``tags``, and ``under_path``.
        """
        if tags:
            tags = set(tags)

        def fltr(tests):
            # Apply the flavor/subsuite/tags/under_path filters to an
            # iterable of test dicts, yielding copies of survivors.
            for test in tests:
                if flavor:
                    # 'devtools' acts as an alias for browser-chrome here.
                    if flavor == 'devtools' and test.get(
                            'flavor') != 'browser-chrome':
                        continue
                    if flavor != 'devtools' and test.get('flavor') != flavor:
                        continue

                if subsuite and test.get('subsuite', 'undefined') != subsuite:
                    continue

                # Keep the test if it shares at least one requested tag.
                if tags and not (tags & set(test.get('tags', '').split())):
                    continue

                if under_path and not test['file_relpath'].startswith(
                        under_path):
                    continue

                # Make a copy so modifications don't change the source.
                yield dict(test)

        paths = paths or []
        paths = [mozpath.normpath(p) for p in paths]
        if not paths:
            # No explicit selection: a single None entry means "all paths".
            paths = [None]

        candidate_paths = set()

        # Puppeteer and WPT manifests are loaded lazily; only pay that
        # cost when a requested path (or unconstrained flavor) needs them.
        if flavor in (None, 'puppeteer') and any(
                self.is_puppeteer_path(p) for p in paths):
            self.add_puppeteer_manifest_data()

        if flavor in (None, 'web-platform-tests') and any(
                self.is_wpt_path(p) for p in paths):
            self.add_wpt_manifest_data()

        for path in sorted(paths):
            if path is None:
                candidate_paths |= set(self.tests_by_path.keys())
                continue

            # Wildcard pattern: match against all known test paths.
            if '*' in path:
                candidate_paths |= {
                    p
                    for p in self.tests_by_path if mozpath.match(p, path)
                }
                continue

            # If the path is a directory, or the path is a prefix of a directory
            # containing tests, pull in all tests in that directory.
            if (path in self.test_dirs
                    or any(p.startswith(path) for p in self.tests_by_path)):
                candidate_paths |= {
                    p
                    for p in self.tests_by_path if p.startswith(path)
                }
                continue

            # If the path is a manifest, add all tests defined in that manifest.
            if any(path.endswith(e) for e in ('.ini', '.list')):
                # Absolute paths compare against the full manifest path;
                # relative paths against the srcdir-relative one.
                key = 'manifest' if os.path.isabs(path) else 'manifest_relpath'
                candidate_paths |= {
                    t['file_relpath']
                    for t in self.tests if mozpath.normpath(t[key]) == path
                }
                continue

            # If it's a test file, add just that file.
            candidate_paths |= {p for p in self.tests_by_path if path in p}

        for p in sorted(candidate_paths):
            tests = self.tests_by_path[p]

            for test in fltr(tests):
                yield test
Пример #41
0
 def test_match(self):
     """Exercise match() against literal, '*', and '**' patterns."""
     # (path, pattern, should_match) triples, checked in the order the
     # original hand-written assertions ran.
     cases = [
         ('foo', '', True),
         ('foo/bar/baz.qux', 'foo/bar', True),
         ('foo/bar/baz.qux', 'foo', True),
         ('foo', '*', True),
         ('foo/bar/baz.qux', 'foo/bar/*', True),
         ('foo/bar/baz.qux', 'foo/bar/*', True),
         ('foo/bar/baz.qux', 'foo/bar/*', True),
         ('foo/bar/baz.qux', 'foo/bar/*', True),
         ('foo/bar/baz.qux', 'foo/*/baz.qux', True),
         ('foo/bar/baz.qux', '*/bar/baz.qux', True),
         ('foo/bar/baz.qux', '*/*/baz.qux', True),
         ('foo/bar/baz.qux', '*/*/*', True),
         ('foo/bar/baz.qux', 'foo/*/*', True),
         ('foo/bar/baz.qux', 'foo/*/*.qux', True),
         ('foo/bar/baz.qux', 'foo/b*/*z.qux', True),
         ('foo/bar/baz.qux', 'foo/b*r/ba*z.qux', True),
         ('foo/bar/baz.qux', 'foo/b*z/ba*r.qux', False),
         ('foo/bar/baz.qux', '**', True),
         ('foo/bar/baz.qux', '**/baz.qux', True),
         ('foo/bar/baz.qux', '**/bar/baz.qux', True),
         ('foo/bar/baz.qux', 'foo/**/baz.qux', True),
         ('foo/bar/baz.qux', 'foo/**/*.qux', True),
         ('foo/bar/baz.qux', '**/foo/bar/baz.qux', True),
         ('foo/bar/baz.qux', 'foo/**/bar/baz.qux', True),
         ('foo/bar/baz.qux', 'foo/**/bar/*.qux', True),
         ('foo/bar/baz.qux', 'foo/**/*.qux', True),
         ('foo/bar/baz.qux', '**/*.qux', True),
         ('foo/bar/baz.qux', '**.qux', False),
         ('foo/bar', 'foo/*/bar', False),
         ('foo/bar/baz.qux', 'foo/**/bar/**', True),
         ('foo/nobar/baz.qux', 'foo/**/bar/**', False),
         ('foo/bar', 'foo/**/bar/**', True),
     ]
     for path, pattern, expected in cases:
         if expected:
             self.assertTrue(match(path, pattern))
         else:
             self.assertFalse(match(path, pattern))
Пример #42
0
 def match(self, patterns):
     """Return True if self.path matches any pattern's ``.path``."""
     for pattern in patterns:
         if mozpath.match(self.path, pattern.path):
             return True
     return False
Пример #43
0
 def test_match(self):
     """Verify match() semantics for globs ('*') and globstars ('**')."""
     matching = [
         ("foo", ""),
         ("foo/bar/baz.qux", "foo/bar"),
         ("foo/bar/baz.qux", "foo"),
         ("foo", "*"),
         ("foo/bar/baz.qux", "foo/bar/*"),
         ("foo/bar/baz.qux", "foo/bar/*"),
         ("foo/bar/baz.qux", "foo/bar/*"),
         ("foo/bar/baz.qux", "foo/bar/*"),
         ("foo/bar/baz.qux", "foo/*/baz.qux"),
         ("foo/bar/baz.qux", "*/bar/baz.qux"),
         ("foo/bar/baz.qux", "*/*/baz.qux"),
         ("foo/bar/baz.qux", "*/*/*"),
         ("foo/bar/baz.qux", "foo/*/*"),
         ("foo/bar/baz.qux", "foo/*/*.qux"),
         ("foo/bar/baz.qux", "foo/b*/*z.qux"),
         ("foo/bar/baz.qux", "foo/b*r/ba*z.qux"),
         ("foo/bar/baz.qux", "**"),
         ("foo/bar/baz.qux", "**/baz.qux"),
         ("foo/bar/baz.qux", "**/bar/baz.qux"),
         ("foo/bar/baz.qux", "foo/**/baz.qux"),
         ("foo/bar/baz.qux", "foo/**/*.qux"),
         ("foo/bar/baz.qux", "**/foo/bar/baz.qux"),
         ("foo/bar/baz.qux", "foo/**/bar/baz.qux"),
         ("foo/bar/baz.qux", "foo/**/bar/*.qux"),
         ("foo/bar/baz.qux", "foo/**/*.qux"),
         ("foo/bar/baz.qux", "**/*.qux"),
     ]
     non_matching = [
         ("foo/bar/baz.qux", "foo/b*z/ba*r.qux"),
         ("foo/bar/baz.qux", "**.qux"),
         ("foo/bar", "foo/*/bar"),
     ]
     for path, pattern in matching:
         self.assertTrue(match(path, pattern))
     for path, pattern in non_matching:
         self.assertFalse(match(path, pattern))
Пример #44
0
def main(output_dirname, verbose, *input_dirs):
    """Merge Android resource directories from *input_dirs* into
    *output_dirname*.

    XML files under ``values*/`` directories are merged into one sorted
    ``<dir>/<dir>.xml`` per configuration, aping the Android Gradle
    resource-merging algorithm.  All other resources are de-duplicated
    (earlier input directories win), explicitly versioned where Android
    implies a version, and pruned of versions that can never be selected
    on devices at or above the minimum supported SDK.

    Returns 0 on success.  Raises ValueError on internal inconsistencies.
    """
    # Map directories to source paths, like
    # `{'values-large-v11': ['/path/to/values-large-v11/strings.xml',
    #                        '/path/to/values-large-v11/colors.xml', ...], ...}`.
    values = defaultdict(list)
    # Map unversioned resource names to maps from versions to source paths, like:
    # `{'drawable-large/icon.png':
    #     {None: '/path/to/drawable-large/icon.png',
    #      11: '/path/to/drawable-large-v11/icon.png', ...}, ...}`.
    resources = defaultdict(dict)

    manifest = InstallManifest()

    for p in uniqify(input_dirs):
        finder = FileFinder(p, find_executables=False)

        values_pattern = 'values*/*.xml'
        for path, _ in finder.find('*/*'):
            if path in MANIFEST_EXCLUSIONS:
                continue

            source_path = mozpath.join(finder.base, path)

            if mozpath.match(path, values_pattern):
                dir, _name = path.split('/')
                dir = with_version(dir)
                values[dir].append(source_path)
                continue

            (resource, version) = classify(path)

            # Earlier paths are taken in preference to later paths.
            # This agrees with aapt.  BUGFIX: the check must consult the
            # per-resource version map; the previous `version not in
            # resources` compared an int/None version against resource-name
            # keys, was always true, and let later inputs clobber earlier.
            if version not in resources[resource]:
                resources[resource][version] = source_path

    # Step 1: merge all XML values into one single, sorted
    # per-configuration values.xml file.  This apes what the Android
    # Gradle resource merging algorithm does.
    merged_values = defaultdict(list)

    for dir, files in values.items():
        for file in files:
            # Splice every child element of each input <resources> root
            # into one flat per-directory list.
            root = ET.ElementTree(file=file).getroot()
            merged_values[dir].extend(root)

        merged = ET.Element('resources')
        # Sort by <type> tag, and then by name.  Note that <item
        # type="type"> is equivalent to <type>.
        key = lambda x: (resource_type.get(x.get('type', x.tag)),
                         x.get('name'))
        merged[:] = sorted(merged_values[dir], key=key)

        # Drop the tools:override'd Snackbar style so it doesn't clash
        # with the support library's own definition.
        for value in merged:
            if value.get('name') == 'TextAppearance.Design.Snackbar.Message':
                if value.get('{http://schemas.android.com/tools}override',
                             False):
                    merged.remove(value)
                    break

        merged_values[dir] = merged

    for dir, values_root in merged_values.items():
        o = mozpath.join(output_dirname, dir, '{}.xml'.format(dir))
        ensureParentDir(o)
        ET.ElementTree(values_root).write(o)

        manifest.add_required_exists(mozpath.join(dir, '{}.xml'.format(dir)))

    # Step 2a: add version numbers for unversioned features
    # corresponding to when the feature was introduced.  Resource
    # qualifiers will never be recognized by Android versions before
    # they were introduced.  For example, density qualifiers are
    # supported only in Android v4 and above.  Therefore
    # "drawable-hdpi" is implicitly "drawable-hdpi-v4".  We version
    # such unversioned resources here.
    for (resource, versions) in resources.items():
        if None in versions:
            dir, name = resource.split('/')
            new_dir = with_version(dir)
            (new_resource,
             new_version) = classify('{}/{}'.format(new_dir, name))
            if new_resource != resource:
                raise ValueError('this is bad')

            # `new_version` might be None: for example, `dir` might be "drawable".
            source_path = versions.pop(None)
            versions[new_version] = source_path

            if verbose:
                if new_version:
                    print("Versioning unversioned resource {} as {}-v{}/{}".
                          format(source_path, dir, new_version, name))

    # TODO: make this a command line argument that takes MOZ_ANDROID_MIN_SDK_VERSION.
    min_sdk = 15
    retained = defaultdict(dict)

    # Step 2b: drop resource directories that will never be used by
    # Android on device.  This depends on the minimum supported
    # Android SDK version.  Suppose the minimum SDK is 15 and we have
    # drawable-v4/icon.png and drawable-v11/icon.png.  The v4 version
    # will never be chosen, since v15 is always greater than v11.
    for (resource, versions) in resources.items():

        def key(v):
            # Treat unversioned (None) as "v0" for ordering purposes.
            return 0 if v is None else v

        # Versions in descending order.
        version_list = sorted(versions.keys(), key=key, reverse=True)
        for version in version_list:
            retained[resource][version] = versions[version]
            if version is not None and version <= min_sdk:
                # Anything older can never be selected on a min_sdk
                # device; stop retaining here.
                break

    if set(retained.keys()) != set(resources.keys()):
        raise ValueError('Something terrible has happened; retained '
                         'resource names do not match input resources '
                         'names')

    if verbose:
        for resource in resources:
            if resources[resource] != retained[resource]:
                for version in sorted(resources[resource].keys(),
                                      reverse=True):
                    if version in retained[resource]:
                        print("Keeping reachable resource {}".format(
                            resources[resource][version]))
                    else:
                        print("Dropping unreachable resource {}".format(
                            resources[resource][version]))

    # Populate manifest.
    for (resource, versions) in retained.items():
        for version in sorted(versions.keys(), reverse=True):
            path = resource
            if version:
                dir, name = resource.split('/')
                path = '{}-v{}/{}'.format(dir, version, name)
            manifest.add_copy(versions[version], path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    result = copier.copy(output_dirname,
                         remove_unaccounted=True,
                         remove_all_directory_symlinks=False,
                         remove_empty_directories=True)

    if verbose:
        print('Updated:', result.updated_files_count)
        print('Removed:',
              result.removed_files_count + result.removed_directories_count)
        print('Existed:', result.existing_files_count)

    return 0
Пример #45
0
 def is_puppeteer_path(self, path):
     """Return True when *path* lies under the vendored Puppeteer tests.

     A path of None (meaning "no path filter") counts as a match.
     """
     return path is None or mozpath.match(
         path, "remote/test/puppeteer/test/**")
def package_fennec_apk(inputs=[],
                       omni_ja=None,
                       lib_dirs=[],
                       assets_dirs=[],
                       features_dirs=[],
                       root_files=[],
                       verbose=False):
    """Assemble a Fennec APK as a Jarrer from input archives and directories.

    Later ``inputs`` overwrite earlier ones, except DEX files, which are
    taken from a single input to keep the multidex set coherent.  Feature
    and asset directories land under ``assets/``, libraries under ``lib/``,
    ``root_files`` at the archive root, and ``omni_ja`` (if given) becomes
    ``assets/omni.ja``.  Returns the populated Jarrer (not yet written).

    NOTE(review): the mutable default arguments ([]) are never mutated
    here, so they are harmless, but they are a latent-hazard pattern.
    """
    jarrer = Jarrer(optimize=False)

    # First, take input files.  The contents of the later files overwrites the
    # content of earlier files.  Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input.  This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename

            # NOTE(review): the leading '/' in '/classes*.dex' looks
            # suspicious -- jar entry names are typically unrooted.
            # Confirm mozpath.match treats this pattern as intended.
            if mozpath.match(path, '/classes*.dex'):
                last_input_with_dex_files = input
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename

            if not mozpath.match(path, '/classes*.dex'):
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Add `file` to the jar at `path`, replacing any existing entry
        # and failing fast when the backing file is missing on disk.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' % \
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            # Features are stored uncompressed under assets/features/.
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                # NOTE(review): on Python 3, read() returns bytes, so this
                # comparison with the str '7zXZ' would always be False;
                # this code appears to target Python 2.  Confirm.
                if f.open().read(5)[1:] == '7zXZ':
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed libraries
                    # (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [
                        buildconfig.substs.get('XZ'), '-zkf',
                        mozpath.join(finder.base, p)
                    ]

                    # For now, the mozglue XZStream ELF loader can only support xz files
                    # with a single stream that contains a single block. In xz, there is no
                    # explicit option to set the max block count. Instead, we force xz to use
                    # single thread mode, which results in a single block.
                    cmd.extend(['--threads=1'])

                    # Pick the branch/call/jump filter matching the target
                    # CPU to improve compression of executable code.
                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'

                    if bcj:
                        cmd.extend([bcj])
                    # We need to explicitly specify the LZMA filter chain to ensure consistent builds
                    # across platforms. Note that the dict size must be less then 16MiB per the hardcoded
                    # value in mozglue/linker/XZStream.cpp. This is the default LZMA filter chain for for
                    # xz-utils version 5.0. See:
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/lzma/lzma_encoder_presets.c
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/api/lzma/container.h#L31
                    cmd.extend([
                        '--lzma2=dict=8MiB,lc=3,lp=0,pb=2,mode=normal,nice=64,mf=bt4,depth=0'
                    ])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    # xz -k leaves the original; swap the compressed file
                    # into place under the original name.
                    os.rename(f.path + '.xz', f.path)
                    compress = False

            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        # omni.ja is already optimized/packed; store it uncompressed.
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)

    return jarrer
Пример #47
0
 def add(self, path, content):
     """Forward *path*/*content* to the wrapped formatter, unless *path*
     matches one of the patterns in self._files, in which case an error
     is reported instead of adding the file.
     """
     blocked = any(mozpath.match(path, spec) for spec in self._files)
     if blocked:
         self._error(self._msg % path)
     else:
         self._formatter.add(path, content)
Пример #48
0
    def resolve_tests(self,
                      paths=None,
                      flavor=None,
                      subsuite=None,
                      under_path=None,
                      tags=None):
        """Resolve tests from an identifier.

        This is a generator of dicts describing each test.

        ``paths`` can be an iterable of values to use to identify tests to run.
        If an entry is a known test file, tests associated with that file are
        returned (there may be multiple configurations for a single file). If
        an entry is a directory, or a prefix of a directory containing tests,
        all tests in that directory are returned. If the string appears in a
        known test file, that test file is considered. If the path contains
        a wildcard pattern, tests matching that pattern are returned.

        If ``under_path`` is a string, it will be used to filter out tests that
        aren't in the specified path prefix relative to topsrcdir or the
        test's installed dir.

        If ``flavor`` is a string, it will be used to filter returned tests
        to only be the flavor specified. A flavor is something like
        ``xpcshell``.

        If ``subsuite`` is a string, it will be used to filter returned tests
        to only be in the subsuite specified.

        If ``tags`` are specified, they will be used to filter returned tests
        to only those with a matching tag.
        """
        if tags:
            tags = set(tags)

        def fltr(tests):
            # Apply the flavor/subsuite/tags/under_path filters to an
            # iterable of test dicts, yielding copies of survivors.
            for test in tests:
                if flavor:
                    # 'devtools' acts as an alias for browser-chrome here.
                    if flavor == 'devtools' and test.get(
                            'flavor') != 'browser-chrome':
                        continue
                    if flavor != 'devtools' and test.get('flavor') != flavor:
                        continue

                if subsuite and test.get('subsuite') != subsuite:
                    continue

                # Keep the test if it shares at least one requested tag.
                if tags and not (tags & set(test.get('tags', '').split())):
                    continue

                if under_path and not test['file_relpath'].startswith(
                        under_path):
                    continue

                # Make a copy so modifications don't change the source.
                yield dict(test)

        paths = paths or []
        paths = [mozpath.normpath(p) for p in paths]
        if not paths:
            # No explicit selection: a single None entry means "all paths".
            paths = [None]

        candidate_paths = set()

        # WPT manifests are loaded lazily; only pay that cost when a
        # requested path (or an unconstrained flavor) may need them.
        if flavor in (None, 'web-platform-tests') and any(
                self.is_wpt_path(p) for p in paths):
            self.add_wpt_manifest_data()

        for path in sorted(paths):
            if path is None:
                candidate_paths |= set(self._tests_by_path.keys())
                continue

            # Wildcard pattern: match against all known test paths.
            if '*' in path:
                candidate_paths |= {
                    p
                    for p in self._tests_by_path if mozpath.match(p, path)
                }
                continue

            # If the path is a directory, or the path is a prefix of a directory
            # containing tests, pull in all tests in that directory.
            if (path in self._test_dirs
                    or any(p.startswith(path) for p in self._tests_by_path)):
                candidate_paths |= {
                    p
                    for p in self._tests_by_path if p.startswith(path)
                }
                continue

            # If it's a test file, add just that file.
            candidate_paths |= {p for p in self._tests_by_path if path in p}

        for p in sorted(candidate_paths):
            tests = self._tests_by_path[p]

            for test in fltr(tests):
                yield test