Example #1
    def _synchronize_docs(self):
        m = InstallManifest()

        m.add_symlink(self._conf_py_path, 'conf.py')

        for dest, source in sorted(self._trees.items()):
            source_dir = os.path.join(self._topsrcdir, source)
            for root, dirs, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.join(root, f)
                    rel_source = source_path[len(source_dir) + 1:]

                    m.add_symlink(source_path, os.path.join(dest, rel_source))

        stage_dir = os.path.join(self._output_dir, 'staging')
        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(stage_dir)

        with open(self._index_path, 'rb') as fh:
            data = fh.read()

        indexes = ['%s/index' % p for p in sorted(self._trees.keys())]
        indexes = '\n   '.join(indexes)

        packages = [os.path.basename(p) for p in self._python_package_dirs]
        packages = ['python/%s' % p for p in packages]
        packages = '\n   '.join(sorted(packages))
        data = data.format(indexes=indexes, python_packages=packages)

        with open(os.path.join(stage_dir, 'index.rst'), 'wb') as fh:
            fh.write(data)
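The example above, like most on this page, follows the same three-step pattern: describe the desired layout in an InstallManifest, populate a FileCopier from it, and let the copier materialize the destination directory. A minimal standalone sketch of that pattern follows; it assumes the mozpack modules from Firefox's mozbuild are importable, and the concrete paths are placeholders.

from mozpack.copier import FileCopier
from mozpack.manifests import InstallManifest

# Describe where files should end up, relative to the copy destination.
manifest = InstallManifest()
manifest.add_copy('/srcdir/docs/conf.py', 'conf.py')      # single file
manifest.add_pattern_copy('/srcdir/docs', '**', 'docs')   # whole tree

# The manifest only describes the install; a FileCopier performs it.
copier = FileCopier()
manifest.populate_registry(copier)
result = copier.copy('/objdir/staging')
print(result.updated_files_count, 'files updated')
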
Example #2
    def _synchronize_docs(self):
        m = InstallManifest()

        m.add_symlink(self._conf_py_path, 'conf.py')

        for dest, source in sorted(self._trees.items()):
            source_dir = os.path.join(self._topsrcdir, source)
            for root, dirs, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.join(root, f)
                    rel_source = source_path[len(source_dir) + 1:]

                    m.add_symlink(source_path, os.path.join(dest, rel_source))

        stage_dir = os.path.join(self._output_dir, 'staging')
        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(stage_dir)

        with open(self._index_path, 'rb') as fh:
            data = fh.read()

        indexes = ['%s/index' % p for p in sorted(self._trees.keys())]
        indexes = '\n   '.join(indexes)

        packages = [os.path.basename(p) for p in self._python_package_dirs]
        packages = ['python/%s' % p for p in packages]
        packages = '\n   '.join(sorted(packages))
        data = data.format(indexes=indexes, python_packages=packages)

        with open(os.path.join(stage_dir, 'index.rst'), 'wb') as fh:
            fh.write(data)
Example #3
    def _synchronize_docs(self):
        m = InstallManifest()

        m.add_symlink(self._conf_py_path, "conf.py")

        for dest, source in sorted(self._trees.items()):
            source_dir = os.path.join(self._topsrcdir, source)
            for root, dirs, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.join(root, f)
                    rel_source = source_path[len(source_dir) + 1 :]

                    m.add_symlink(source_path, os.path.join(dest, rel_source))

        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(self._docs_dir)

        with open(self._index_path, "rb") as fh:
            data = fh.read()

        indexes = ["%s/index" % p for p in sorted(self._trees.keys())]
        indexes = "\n   ".join(indexes)

        packages = [os.path.basename(p) for p in self._python_package_dirs]
        packages = ["python/%s" % p for p in packages]
        packages = "\n   ".join(sorted(packages))
        data = data.format(indexes=indexes, python_packages=packages)

        with open(os.path.join(self._docs_dir, "index.rst"), "wb") as fh:
            fh.write(data)
Example #4
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.
    """

    if test_objs:
        manifest = _make_install_manifest(topsrcdir, topobjdir, test_objs)
    else:
        # If we don't actually have a list of tests to install we install
        # test and support files wholesale.
        manifest = InstallManifest(
            mozpath.join(topobjdir, '_build_manifests', 'install',
                         '_test_files'))

    harness_files_manifest = mozpath.join(topobjdir, '_build_manifests',
                                          'install', tests_root)

    if os.path.isfile(harness_files_manifest):
        # If the backend has generated an install manifest for test harness
        # files they are treated as a monolith and installed each time we
        # run tests. Fortunately there are not very many.
        manifest |= InstallManifest(harness_files_manifest)

    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(mozpath.join(topobjdir, tests_root), remove_unaccounted=False)
Example #5
def process_manifest(destdir, paths, remove_unaccounted=True):
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(destdir, remove_unaccounted=remove_unaccounted)
Example #6
def process_manifest(destdir, paths, remove_unaccounted=True):
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(destdir, remove_unaccounted=remove_unaccounted)
Example #7
def process_manifest(destdir, *paths):
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(destdir)
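Example #7 above is the simplest form of process_manifest. For reference, a caller passes the destination directory followed by one or more on-disk manifest paths; the call below is hypothetical and its paths are placeholders.

# Hypothetical invocation of the process_manifest from Example #7.
result = process_manifest('/objdir/dist/bin',
                          '/objdir/_build_manifests/install/dist_bin')
print(result.updated_files_count, result.removed_files_count)
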
Example #8
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.
    """
    flavor_info = {flavor: (root, prefix, install)
                   for (flavor, root, prefix, install) in TEST_MANIFESTS.values()}
    objdir_dest = mozpath.join(topobjdir, tests_root)

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source, mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(o, root,
                                                        manifest_dir,
                                                        out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(mozpath.join(topobjdir,
                                             '_build_manifests',
                                             'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(objdir_dest,
                remove_unaccounted=False)
Example #9
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.
    """
    flavor_info = {
        flavor: (root, prefix, install)
        for (flavor, root, prefix, install) in TEST_MANIFESTS.values()
    }
    objdir_dest = mozpath.join(topobjdir, tests_root)

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source,
                                               mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(
            o, root, manifest_dir, out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(
        mozpath.join(topobjdir, '_build_manifests', 'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(objdir_dest, remove_unaccounted=False)
Example #10
    def consume_finished(self):
        mp = os.path.join(self.environment.topobjdir, '_build_manifests',
                          'install', '_tests')
        install_manifest = InstallManifest(mp)
        reg = FileRegistry()
        install_manifest.populate_registry(reg)

        for dest, src in reg:
            if not hasattr(src, 'path'):
                continue

            if not os.path.isabs(dest):
                dest = '_tests/' + dest

            obj_path = mozpath.join(self.environment.topobjdir, dest)
            if isinstance(src, PreprocessedFile):
                assert os.path.exists(obj_path), '%s should exist' % obj_path
                pp_info = generate_pp_info(obj_path,
                                           self.environment.topsrcdir)
            else:
                pp_info = None

            rel_src = mozpath.relpath(src.path, self.environment.topsrcdir)
            self._install_mapping[dest] = rel_src, pp_info

        # Our result has four parts:
        #  A map from url prefixes to objdir directories:
        #  { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
        #  A map of overrides.
        #  A map from objdir paths to sourcedir paths, and an object storing mapping
        #    information for preprocessed files:
        #  { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
        #    [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ],
        #    ... }
        #  An object containing build configuration information.
        outputfile = os.path.join(self.environment.topobjdir,
                                  'chrome-map.json')
        with self._write_file(outputfile) as fh:
            chrome_mapping = self.manifest_handler.chrome_mapping
            overrides = self.manifest_handler.overrides
            json.dump([{k: list(v)
                        for k, v in chrome_mapping.iteritems()}, overrides,
                       self._install_mapping, {
                           'topobjdir':
                           mozpath.normpath(self.environment.topobjdir),
                           'MOZ_APP_NAME':
                           self.environment.substs.get('MOZ_APP_NAME'),
                           'OMNIJAR_NAME':
                           self.environment.substs.get('OMNIJAR_NAME'),
                           'MOZ_MACBUNDLE_NAME':
                           self.environment.substs.get('MOZ_MACBUNDLE_NAME'),
                       }],
                      fh,
                      sort_keys=True,
                      indent=2)
Example #11
    def test_final_target_files_wildcard(self):
        """Ensure that wildcards in FINAL_TARGET_FILES work properly."""
        env = self._consume('final-target-files-wildcard', FasterMakeBackend)
        m = InstallManifest(path=mozpath.join(env.topobjdir,
            'faster', 'install_dist_bin'))
        self.assertEqual(len(m), 1)
        reg = FileRegistry()
        m.populate_registry(reg)
        expected = [('foo/bar.xyz', 'bar.xyz'), ('foo/foo.xyz', 'foo.xyz')]
        actual = [(path, mozpath.relpath(f.path, env.topsrcdir)) for (path, f) in reg]
        self.assertEqual(expected, actual)
Example #12
    def _load_manifest(self, path, root):
        install_manifest = InstallManifest(path)
        reg = FileRegistry()
        install_manifest.populate_registry(reg)

        for dest, src in reg:
            if hasattr(src, 'path'):
                if not os.path.isabs(dest):
                    dest = root + dest
                self._install_mapping[dest] = (src.path,
                                               isinstance(
                                                   src, PreprocessedFile))
Example #13
    def test_final_target_files_wildcard(self):
        """Ensure that wildcards in FINAL_TARGET_FILES work properly."""
        env = self._consume("final-target-files-wildcard", FasterMakeBackend)
        m = InstallManifest(
            path=mozpath.join(env.topobjdir, "faster", "install_dist_bin"))
        self.assertEqual(len(m), 1)
        reg = FileRegistry()
        m.populate_registry(reg)
        expected = [("foo/bar.xyz", "bar.xyz"), ("foo/foo.xyz", "foo.xyz")]
        actual = [(path, mozpath.relpath(f.path, env.topsrcdir))
                  for (path, f) in reg]
        self.assertEqual(expected, actual)
Example #14
    def file_copier(self):
        # TODO: invalidate the file copier when the build system
        # itself changes, i.e., the underlying unified manifest
        # changes.
        file_copier = FileCopier()

        unified_manifest = InstallManifest(
            mozpath.join(self.config_environment.topobjdir,
                         'faster', 'unified_install_dist_bin'))

        unified_manifest.populate_registry(file_copier, defines_override=self.defines)

        return file_copier
Example #15
    def file_copier(self):
        # TODO: invalidate the file copier when the build system
        # itself changes, i.e., the underlying unified manifest
        # changes.
        file_copier = FileCopier()

        unified_manifest = InstallManifest(
            mozpath.join(self.config_environment.topobjdir,
                         'faster', 'unified_install_dist_bin'))

        unified_manifest.populate_registry(file_copier, defines_override=self.defines)

        return file_copier
Example #16
def find_generated_harness_files():
    # TEST_HARNESS_FILES end up in an install manifest at
    # $topobjdir/_build_manifests/install/_tests.
    manifest = InstallManifest(mozpath.join(buildconfig.topobjdir,
                                            '_build_manifests',
                                            'install',
                                            '_tests'))
    registry = FileRegistry()
    manifest.populate_registry(registry)
    # Conveniently, the generated files we care about will already
    # exist in the objdir, so we can identify relevant files if
    # they're an `ExistingFile` instance.
    return [mozpath.join('_tests', p) for p in registry.paths()
            if isinstance(registry[p], ExistingFile)]
Example #17
def find_generated_harness_files():
    # TEST_HARNESS_FILES end up in an install manifest at
    # $topobjdir/_build_manifests/install/_tests.
    manifest = InstallManifest(mozpath.join(buildconfig.topobjdir,
                                            '_build_manifests',
                                            'install',
                                            '_tests'))
    registry = FileRegistry()
    manifest.populate_registry(registry)
    # Conveniently, the generated files we care about will already
    # exist in the objdir, so we can identify relevant files if
    # they're an `ExistingFile` instance.
    return [mozpath.join('_tests', p) for p in registry.paths()
            if isinstance(registry[p], ExistingFile)]
Example #18
def process_manifest(destdir, paths,
        remove_unaccounted=True,
        remove_all_directory_symlinks=True,
        remove_empty_directories=True):
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)
Example #19
def process_manifest(destdir,
                     paths,
                     track=None,
                     remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True,
                     defines={}):

    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir,
                                find_executables=False,
                                find_dotfiles=True)
            for dest in manifest._dests:
                if '*' in dest:
                    for p, f in finder.find(dest):
                        remove_unaccounted.add(p, dummy_file)
                else:
                    remove_unaccounted.add(dest, dummy_file)
        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)

    if track:
        manifest.write(path=track)

    return result
Example #20
def process_manifest(destdir,
                     paths,
                     remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True):
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)
Example #21
def process_manifest(
    destdir,
    paths,
    track=None,
    remove_unaccounted=True,
    remove_all_directory_symlinks=True,
    remove_empty_directories=True,
    defines={},
):

    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir, find_executables=False, find_dotfiles=True)
            for dest in manifest._dests:
                for p, f in finder.find(dest):
                    remove_unaccounted.add(p, dummy_file)

        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories,
    )

    if track:
        manifest.write(path=track)

    return result
Example #22
def process_manifest(destdir, paths, track,
                     no_symlinks=False,
                     defines={}):

    if os.path.exists(track):
        # We use the same format as install manifests for the tracking
        # data.
        manifest = InstallManifest(path=track)
        remove_unaccounted = FileRegistry()
        dummy_file = BaseFile()

        finder = FileFinder(destdir, find_dotfiles=True)
        for dest in manifest._dests:
            for p, f in finder.find(dest):
                remove_unaccounted.add(p, dummy_file)

        remove_empty_directories = True
        remove_all_directory_symlinks = True

    else:
        # If tracking is enabled and there is no file, we don't want to
        # be removing anything.
        remove_unaccounted = False
        remove_empty_directories = False
        remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    link_policy = "copy" if no_symlinks else "symlink"
    manifest.populate_registry(
        copier, defines_override=defines, link_policy=link_policy
    )
    result = copier.copy(destdir,
                         remove_unaccounted=remove_unaccounted,
                         remove_all_directory_symlinks=remove_all_directory_symlinks,
                         remove_empty_directories=remove_empty_directories)

    if track:
        # We should record files that we actually copied.
        # It is too late to expand wildcards when the track file is read.
        manifest.write(path=track, expand_pattern=True)

    return result
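Example #22 writes a track file recording what was actually copied (with wildcards expanded), so the next run can prune files that are no longer in any manifest. A hypothetical invocation, with placeholder paths and no preprocessor defines, might look like this.

# Hypothetical invocation of the tracked process_manifest from Example #22.
result = process_manifest('/objdir/dist/bin',
                          ['/objdir/_build_manifests/install/dist_bin'],
                          '/objdir/dist_bin.track',
                          no_symlinks=False,
                          defines={})
print(result.updated_files_count)
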
Example #23
File: __init__.py Project: urrytr/gecko
    def _synchronize_docs(self):
        m = InstallManifest()

        m.add_link(self.conf_py_path, 'conf.py')

        for dest, source in sorted(self.trees.items()):
            source_dir = os.path.join(self.topsrcdir, source)
            for root, dirs, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.join(root, f)
                    rel_source = source_path[len(source_dir) + 1:]

                    m.add_link(source_path, os.path.join(dest, rel_source))

        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(self.staging_dir)

        with open(self.index_path, 'rb') as fh:
            data = fh.read()

        def is_toplevel(key):
            """Whether the tree is nested under the toplevel index, or is
            nested under another tree's index.
            """
            for k in self.trees:
                if k == key:
                    continue
                if key.startswith(k):
                    return False
            return True

        toplevel_trees = {
            k: v
            for k, v in self.trees.items() if is_toplevel(k)
        }
        indexes = ['%s/index' % p for p in sorted(toplevel_trees.keys())]
        indexes = '\n   '.join(indexes)

        packages = [os.path.basename(p) for p in self.python_package_dirs]
        packages = ['python/%s' % p for p in packages]
        packages = '\n   '.join(sorted(packages))
        data = data.format(indexes=indexes, python_packages=packages)

        with open(os.path.join(self.staging_dir, 'index.rst'), 'wb') as fh:
            fh.write(data)
Example #24
def describe_install_manifest(manifest, dest_dir):
    try:
        manifest = InstallManifest(manifest)
    except UnreadableInstallManifest:
        raise IOError(errno.EINVAL, 'Error parsing manifest file', manifest)

    reg = FileRegistry()

    mapping = {}
    manifest.populate_registry(reg)
    for dest_file, src in reg:
        if hasattr(src, 'path'):
            dest_path = mozpath.join(dest_dir, dest_file)
            relsrc_path = mozpath.relpath(src.path, buildconfig.topsrcdir)
            mapping[dest_path] = relsrc_path

    return mapping
Example #25
def describe_install_manifest(manifest, dest_dir):
    try:
        manifest = InstallManifest(manifest)
    except UnreadableInstallManifest:
        raise IOError(errno.EINVAL, 'Error parsing manifest file', manifest)

    reg = FileRegistry()

    mapping = {}
    manifest.populate_registry(reg)
    dest_dir = mozpath.join(buildconfig.topobjdir, dest_dir)
    for dest_file, src in reg:
        if hasattr(src, 'path'):
            dest_path = mozpath.join(dest_dir, dest_file)
            relsrc_path = mozpath.relpath(src.path, buildconfig.topsrcdir)
            mapping[dest_path] = relsrc_path

    return mapping
Example #26
    def test_pattern_expansion(self):
        source = self.tmppath('source')
        os.mkdir(source)
        os.mkdir('%s/base' % source)
        os.mkdir('%s/base/foo' % source)

        with open('%s/base/foo/file1' % source, 'a'):
            pass

        with open('%s/base/foo/file2' % source, 'a'):
            pass

        m = InstallManifest()
        m.add_pattern_symlink('%s/base' % source, '**', 'dest')

        c = FileCopier()
        m.populate_registry(c)
        self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])
Example #27
    def test_pattern_expansion(self):
        source = self.tmppath("source")
        os.mkdir(source)
        os.mkdir("%s/base" % source)
        os.mkdir("%s/base/foo" % source)

        with open("%s/base/foo/file1" % source, "a"):
            pass

        with open("%s/base/foo/file2" % source, "a"):
            pass

        m = InstallManifest()
        m.add_pattern_link("%s/base" % source, "**", "dest")

        c = FileCopier()
        m.populate_registry(c)
        self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
Example #28
    def test_pattern_expansion(self):
        source = self.tmppath('source')
        os.mkdir(source)
        os.mkdir('%s/base' % source)
        os.mkdir('%s/base/foo' % source)

        with open('%s/base/foo/file1' % source, 'a'):
            pass

        with open('%s/base/foo/file2' % source, 'a'):
            pass

        m = InstallManifest()
        m.add_pattern_symlink('%s/base' % source, '**', 'dest')

        c = FileCopier()
        m.populate_registry(c)
        self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])
Example #29
    def test_pattern_expansion(self):
        source = self.tmppath("source")
        os.mkdir(source)
        os.mkdir("%s/base" % source)
        os.mkdir("%s/base/foo" % source)

        with open("%s/base/foo/file1" % source, "a"):
            pass

        with open("%s/base/foo/file2" % source, "a"):
            pass

        m = InstallManifest()
        m.add_pattern_symlink("%s/base" % source, "**", "dest")

        c = FileCopier()
        m.populate_registry(c)
        self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
Example #30
    def setup_benchmarks(self):
        """Make sure benchmarks are linked to the proper location in the objdir.

        Benchmarks can either live in-tree or in an external repository. In the latter
        case also clone/update the repository if necessary.
        """
        print("Updating external benchmarks from {}".format(
            BENCHMARK_REPOSITORY))

        # Set up the external repo
        external_repo_path = os.path.join(get_state_dir()[0],
                                          'performance-tests')

        if not os.path.isdir(external_repo_path):
            subprocess.check_call(
                ['git', 'clone', BENCHMARK_REPOSITORY, external_repo_path])
        else:
            subprocess.check_call(['git', 'checkout', 'master'],
                                  cwd=external_repo_path)
            subprocess.check_call(['git', 'pull'], cwd=external_repo_path)

        subprocess.check_call(['git', 'checkout', BENCHMARK_REVISION],
                              cwd=external_repo_path)

        # Link benchmarks to the objdir
        benchmark_paths = (
            os.path.join(external_repo_path, 'benchmarks'),
            os.path.join(self.topsrcdir, 'third_party', 'webkit',
                         'PerformanceTests'),
        )
        manifest = InstallManifest()

        for benchmark_path in benchmark_paths:
            for path in os.listdir(benchmark_path):
                abspath = os.path.join(benchmark_path, path)
                if not os.path.isdir(abspath) or path.startswith('.'):
                    continue

                manifest.add_link(abspath, path)

        copier = FileCopier()
        manifest.populate_registry(copier)
        copier.copy(
            os.path.join(self.topobjdir, 'testing', 'raptor', 'benchmarks'))
Example #31
def install_test_files(topsrcdir, topobjdir, tests_root):
    """Installs the requested test files to the objdir. This is invoked by
    test runners to avoid installing tens of thousands of test files when
    only a few tests need to be run.
    """

    manifest = InstallManifest(
        mozpath.join(topobjdir, "_build_manifests", "install", "_test_files"))

    harness_files_manifest = mozpath.join(topobjdir, "_build_manifests",
                                          "install", tests_root)

    if os.path.isfile(harness_files_manifest):
        # If the backend has generated an install manifest for test harness
        # files they are treated as a monolith and installed each time we
        # run tests. Fortunately there are not very many.
        manifest |= InstallManifest(harness_files_manifest)

    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(mozpath.join(topobjdir, tests_root), remove_unaccounted=False)
Example #32
    def test_add_entries_from(self):
        source = self.tmppath("source")
        os.mkdir(source)
        os.mkdir("%s/base" % source)
        os.mkdir("%s/base/foo" % source)

        with open("%s/base/foo/file1" % source, "a"):
            pass

        with open("%s/base/foo/file2" % source, "a"):
            pass

        m = InstallManifest()
        m.add_pattern_link("%s/base" % source, "**", "dest")

        p = InstallManifest()
        p.add_entries_from(m)
        self.assertEqual(len(p), 1)

        c = FileCopier()
        p.populate_registry(c)
        self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])

        q = InstallManifest()
        q.add_entries_from(m, base="target")
        self.assertEqual(len(q), 1)

        d = FileCopier()
        q.populate_registry(d)
        self.assertEqual(d.paths(),
                         ["target/dest/foo/file1", "target/dest/foo/file2"])

        # Some of the values in an InstallManifest include destination
        # information that is present in the keys.  Verify that we can
        # round-trip serialization.
        r = InstallManifest()
        r.add_entries_from(m)
        r.add_entries_from(m, base="target")
        self.assertEqual(len(r), 2)

        temp_path = self.tmppath("temp_path")
        r.write(path=temp_path)

        s = InstallManifest(path=temp_path)
        e = FileCopier()
        s.populate_registry(e)

        self.assertEqual(
            e.paths(),
            [
                "dest/foo/file1",
                "dest/foo/file2",
                "target/dest/foo/file1",
                "target/dest/foo/file2",
            ],
        )
Example #33
    def test_add_entries_from(self):
        source = self.tmppath('source')
        os.mkdir(source)
        os.mkdir('%s/base' % source)
        os.mkdir('%s/base/foo' % source)

        with open('%s/base/foo/file1' % source, 'a'):
            pass

        with open('%s/base/foo/file2' % source, 'a'):
            pass

        m = InstallManifest()
        m.add_pattern_link('%s/base' % source, '**', 'dest')

        p = InstallManifest()
        p.add_entries_from(m)
        self.assertEqual(len(p), 1)

        c = FileCopier()
        p.populate_registry(c)
        self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])

        q = InstallManifest()
        q.add_entries_from(m, base='target')
        self.assertEqual(len(q), 1)

        d = FileCopier()
        q.populate_registry(d)
        self.assertEqual(d.paths(),
                         ['target/dest/foo/file1', 'target/dest/foo/file2'])

        # Some of the values in an InstallManifest include destination
        # information that is present in the keys.  Verify that we can
        # round-trip serialization.
        r = InstallManifest()
        r.add_entries_from(m)
        r.add_entries_from(m, base='target')
        self.assertEqual(len(r), 2)

        temp_path = self.tmppath('temp_path')
        r.write(path=temp_path)

        s = InstallManifest(path=temp_path)
        e = FileCopier()
        s.populate_registry(e)

        self.assertEqual(e.paths(), [
            'dest/foo/file1', 'dest/foo/file2', 'target/dest/foo/file1',
            'target/dest/foo/file2'
        ])
Example #34
    def test_add_entries_from(self):
        source = self.tmppath('source')
        os.mkdir(source)
        os.mkdir('%s/base' % source)
        os.mkdir('%s/base/foo' % source)

        with open('%s/base/foo/file1' % source, 'a'):
            pass

        with open('%s/base/foo/file2' % source, 'a'):
            pass

        m = InstallManifest()
        m.add_pattern_link('%s/base' % source, '**', 'dest')

        p = InstallManifest()
        p.add_entries_from(m)
        self.assertEqual(len(p), 1)

        c = FileCopier()
        p.populate_registry(c)
        self.assertEqual(c.paths(), ['dest/foo/file1', 'dest/foo/file2'])

        q = InstallManifest()
        q.add_entries_from(m, base='target')
        self.assertEqual(len(q), 1)

        d = FileCopier()
        q.populate_registry(d)
        self.assertEqual(d.paths(), ['target/dest/foo/file1', 'target/dest/foo/file2'])

        # Some of the values in an InstallManifest include destination
        # information that is present in the keys.  Verify that we can
        # round-trip serialization.
        r = InstallManifest()
        r.add_entries_from(m)
        r.add_entries_from(m, base='target')
        self.assertEqual(len(r), 2)

        temp_path = self.tmppath('temp_path')
        r.write(path=temp_path)

        s = InstallManifest(path=temp_path)
        e = FileCopier()
        s.populate_registry(e)

        self.assertEqual(e.paths(),
                         ['dest/foo/file1', 'dest/foo/file2',
                          'target/dest/foo/file1', 'target/dest/foo/file2'])
Example #35
    def test_preprocessor(self):
        manifest = self.tmppath('m')
        deps = self.tmppath('m.pp')
        dest = self.tmppath('dest')
        include = self.tmppath('p_incl')

        with open(include, 'wt') as fh:
            fh.write('#define INCL\n')
        time = os.path.getmtime(include) - 3
        os.utime(include, (time, time))

        with open(self.tmppath('p_source'), 'wt') as fh:
            fh.write('#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n')
            fh.write('#ifdef DEPTEST\nPASS2\n#endif\n')
            fh.write('#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n')
        time = os.path.getmtime(self.tmppath('p_source')) - 3
        os.utime(self.tmppath('p_source'), (time, time))

        # Create and write a manifest with the preprocessed file, then apply it.
        # This should write out our preprocessed file.
        m = InstallManifest()
        m.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#', {
            'FOO': 'BAR',
            'BAZ': 'QUX'
        })
        m.write(path=manifest)

        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS1\n')

        # Create a second manifest with the preprocessed file, then apply it.
        # Since this manifest does not exist on the disk, there should not be a
        # dependency on it, and the preprocessed file should not be modified.
        m2 = InstallManifest()
        m2.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
                          {'DEPTEST': True})
        c = FileCopier()
        m2.populate_registry(c)
        result = c.copy(dest)

        self.assertFalse(self.tmppath('dest/p_dest') in result.updated_files)
        self.assertTrue(self.tmppath('dest/p_dest') in result.existing_files)

        # Write out the second manifest, then load it back in from the disk.
        # This should add the dependency on the manifest file, so our
        # preprocessed file should be regenerated with the new defines.
        # We also set the mtime on the destination file back, so it will be
        # older than the manifest file.
        m2.write(path=manifest)
        time = os.path.getmtime(manifest) - 1
        os.utime(self.tmppath('dest/p_dest'), (time, time))
        m2 = InstallManifest(path=manifest)
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS2\n')

        # Set the time on the manifest back, so it won't be picked up as
        # modified in the next test
        time = os.path.getmtime(manifest) - 1
        os.utime(manifest, (time, time))

        # Update the contents of a file included by the source file. This should
        # cause the destination to be regenerated.
        with open(include, 'wt') as fh:
            fh.write('#define INCLTEST\n')

        time = os.path.getmtime(include) - 1
        os.utime(self.tmppath('dest/p_dest'), (time, time))
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS2\nPASS3\n')
Example #36
def main(output_dirname, verbose, *input_dirs):
    # Map directories to source paths, like
    # `{'values-large-v11': ['/path/to/values-large-v11/strings.xml',
    #                        '/path/to/values-large-v11/colors.xml', ...], ...}`.
    values = defaultdict(list)
    # Map unversioned resource names to maps from versions to source paths, like:
    # `{'drawable-large/icon.png':
    #     {None: '/path/to/drawable-large/icon.png',
    #      11: '/path/to/drawable-large-v11/icon.png', ...}, ...}`.
    resources = defaultdict(dict)

    manifest = InstallManifest()

    for p in uniqify(input_dirs):
        finder = FileFinder(p, find_executables=False)

        values_pattern = 'values*/*.xml'
        for path, _ in finder.find('*/*'):
            if path in MANIFEST_EXCLUSIONS:
                continue

            source_path = mozpath.join(finder.base, path)

            if mozpath.match(path, values_pattern):
                dir, _name = path.split('/')
                dir = with_version(dir)
                values[dir].append(source_path)
                continue

            (resource, version) = classify(path)

            # Earlier paths are taken in preference to later paths.
            # This agrees with aapt.
            if version not in resources:
                resources[resource][version] = source_path

    # Step 1: merge all XML values into one single, sorted
    # per-configuration values.xml file.  This apes what the Android
    # Gradle resource merging algorithm does.
    merged_values = defaultdict(list)

    for dir, files in values.items():
        for file in files:
            values = ET.ElementTree(file=file).getroot()
            merged_values[dir].extend(values)

        values = ET.Element('resources')
        # Sort by <type> tag, and then by name.  Note that <item
        # type="type"> is equivalent to <type>.
        key = lambda x: (resource_type.get(x.get('type', x.tag)), x.get('name'))
        values[:] = sorted(merged_values[dir], key=key)

        for value in values:
            if value.get('name') == 'TextAppearance.Design.Snackbar.Message':
                if value.get('{http://schemas.android.com/tools}override',
                             False):
                    values.remove(value)
                    break

        merged_values[dir] = values

    for dir, values in merged_values.items():
        o = mozpath.join(output_dirname, dir, '{}.xml'.format(dir))
        ensureParentDir(o)
        ET.ElementTree(values).write(o)

        manifest.add_required_exists(mozpath.join(dir, '{}.xml'.format(dir)))

    # Step 2a: add version numbers for unversioned features
    # corresponding to when the feature was introduced.  Resource
    # qualifiers will never be recognized by Android versions before
    # they were introduced.  For example, density qualifiers are
    # supported only in Android v4 and above.  Therefore
    # "drawable-hdpi" is implicitly "drawable-hdpi-v4".  We version
    # such unversioned resources here.
    for (resource, versions) in resources.items():
        if None in versions:
            dir, name = resource.split('/')
            new_dir = with_version(dir)
            (new_resource,
             new_version) = classify('{}/{}'.format(new_dir, name))
            if new_resource != resource:
                raise ValueError('this is bad')

            # `new_version` might be None: for example, `dir` might be "drawable".
            source_path = versions.pop(None)
            versions[new_version] = source_path

            if verbose:
                if new_version:
                    print("Versioning unversioned resource {} as {}-v{}/{}".
                          format(source_path, dir, new_version, name))

    # TODO: make this a command line argument that takes MOZ_ANDROID_MIN_SDK_VERSION.
    min_sdk = 15
    retained = defaultdict(dict)

    # Step 2b: drop resource directories that will never be used by
    # Android on device.  This depends on the minimum supported
    # Android SDK version.  Suppose the minimum SDK is 15 and we have
    # drawable-v4/icon.png and drawable-v11/icon.png.  The v4 version
    # will never be chosen, since v15 is always greater than v11.
    for (resource, versions) in resources.items():

        def key(v):
            return 0 if v is None else v

        # Versions in descending order.
        version_list = sorted(versions.keys(), key=key, reverse=True)
        for version in version_list:
            retained[resource][version] = versions[version]
            if version is not None and version <= min_sdk:
                break

    if set(retained.keys()) != set(resources.keys()):
        raise ValueError('Something terrible has happened; retained '
                         'resource names do not match input resources '
                         'names')

    if verbose:
        for resource in resources:
            if resources[resource] != retained[resource]:
                for version in sorted(resources[resource].keys(),
                                      reverse=True):
                    if version in retained[resource]:
                        print("Keeping reachable resource {}".format(
                            resources[resource][version]))
                    else:
                        print("Dropping unreachable resource {}".format(
                            resources[resource][version]))

    # Populate manifest.
    for (resource, versions) in retained.items():
        for version in sorted(versions.keys(), reverse=True):
            path = resource
            if version:
                dir, name = resource.split('/')
                path = '{}-v{}/{}'.format(dir, version, name)
            manifest.add_copy(versions[version], path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    print('mr', os.getcwd())
    result = copier.copy(output_dirname,
                         remove_unaccounted=True,
                         remove_all_directory_symlinks=False,
                         remove_empty_directories=True)

    if verbose:
        print('Updated:', result.updated_files_count)
        print('Removed:',
              result.removed_files_count + result.removed_directories_count)
        print('Existed:', result.existing_files_count)

    return 0
Example #37
File: msix.py Project: Floflis/gecko-b2g
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=[],
    locale_allowlist=set(),
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly?  Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )
    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)
    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds.
    # The nested langpack XPI files can't be read by `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to give us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that we
    # can't easily evaluate completely here.  Instead, we take the last value
    # from `configure.sh`.
    lines = [
        line
        for line in open(mozpath.join(branding, "configure.sh")).readlines()
        if "MOZ_IGECKOBACKCHANNEL_IID" in line
    ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding.  Differentiate Beta a little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
    # Package Root'.  This is `BrandFullName` in the installer, and we want to
    # be close but to not match.  By not matching, we hope to prevent confusion
    # and/or errors between regularly installed builds and App Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here.  I.e.,
    # "__{publisherID}".  My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
    # missing field, but it's not necessary, since this is just an output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs. This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)

            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts fetched from dependent
        # tasks.  In particular, langpacks will be named like `target.langpack.xpi`.  The fetch
        # tasks do allow us to put them in a per-locale directory, so that the entire set can be
        # fetched.  Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE".  This is how langpacks
                # are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/[email protected]".  This is what AMO
                # serves and how flatpak builds name langpacks, but not how snap builds name
                # langpacks.  I can't explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/[email protected]" into "LOCALE".  This is
                # how langpacks are presented from an unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales (see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages), encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, encoded in
    # https://searchfox.org/mozilla-central/source/browser/locales/all-locales, but we only
    # advertise support in the App manifest for the intersection of that set and the set of
    # Windows-supported locales.
    #
    # We distribute all langpacks to avoid the following issue.  Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package.  MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update.  But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
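    # Illustrative (hypothetical) values: with locales == {"de", "ja", "tlh"} and
    # locale_allowlist == {"de", "ja"}, "tlh" stays bundled but unadvertised, while
    # only "de" and "ja" (plus "en-US", re-added below) are declared in the manifest.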
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f'    <Resource Language="{locale}" />' for locale in sorted(locales)
    )
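    # Each entry renders like '    <Resource Language="en-US" />'; the joined block is
    # substituted into AppxManifest.xml.in below via APPX_RESOURCE_LANGUAGE_LIST.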

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
        # Beta Package Root'.  See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
            log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash- and hyphen-style arguments; `makemsix`
    # supports only the hyphen style.  `makeappx.exe` also lets us overwrite the
    # output and gives more feedback, so we prefer invoking it with those flags.
    # This also accommodates `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
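The langpack renaming performed in the distribution loop above can be isolated into a small helper. The sketch below is illustrative only: the helper name and the sample path are hypothetical, and it assumes `mozpack.path` is importable, as in the function above.

import os

import mozpack.path as mozpath


def remap_langpack(p):
    # Mirror the rule above: CI-style "LOCALE/target.langpack.xpi" becomes
    # "locale-LOCALE/[email protected]"; other paths pass through.
    if os.path.basename(p) == "target.langpack.xpi":
        base, locale = os.path.split(os.path.dirname(p))
        return mozpath.normsep(
            mozpath.join(
                base,
                f"locale-{locale}",
                f"langpack-{locale}@firefox.mozilla.org.xpi",
            )
        )
    return p


print(remap_langpack("langpacks/de/target.langpack.xpi"))
# langpacks/locale-de/[email protected]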
Example #38
    def _synchronize_docs(self, app):
        m = InstallManifest()

        with open(os.path.join(MAIN_DOC_PATH, "config.yml"), "r") as fh:
            tree_config = yaml.safe_load(fh)["categories"]

        m.add_link(self.conf_py_path, "conf.py")

        for dest, source in sorted(self.trees.items()):
            source_dir = os.path.join(self.topsrcdir, source)
            for root, _, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.normpath(os.path.join(root, f))
                    rel_source = source_path[len(source_dir) + 1:]
                    target = os.path.normpath(os.path.join(dest, rel_source))
                    if source_path.endswith(".md"):
                        self._process_markdown(m, source_path,
                                               os.path.join(".", target))
                    else:
                        m.add_link(source_path, target)

        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(self.staging_dir, remove_empty_directories=False)

        with open(self.index_path, "r") as fh:
            data = fh.read()

        def is_toplevel(key):
            """Whether the tree is nested under the toplevel index, or is
            nested under another tree's index.
            """
            for k in self.trees:
                if k == key:
                    continue
                if key.startswith(k):
                    return False
            return True
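        # For example, with hypothetical keys {"testing": ..., "testing/marionette": ...},
        # only "testing" is toplevel: "testing/marionette" starts with "testing" and is
        # therefore treated as nested under it.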

        def format_paths(paths):
            source_doc = ["%s/index" % p for p in paths]
            return "\n   ".join(source_doc)

        toplevel_trees = {
            k: v
            for k, v in self.trees.items() if is_toplevel(k)
        }

        CATEGORIES = {}
        # Generate the category -> formatted index paths mapping for the tree.
        for t in tree_config:
            CATEGORIES[t] = format_paths(tree_config[t])

        # During livereload, we don't correctly rebuild the full document tree
        # (Bug 1557020): the page is no longer referenced within the index tree,
        # so we only check categorization when the complete tree is being rebuilt.
        if app.srcdir == self.topsrcdir:
            indexes = set([
                os.path.normpath(os.path.join(p, "index"))
                for p in toplevel_trees.keys()
            ])
            # Format categories like indexes
            cats = "\n".join(CATEGORIES.values()).split("\n")
            # Remove heading spaces
            cats = [os.path.normpath(x.strip()) for x in cats]
            indexes = tuple(set(indexes) - set(cats))
            if indexes:
                # In case a new doc isn't categorized
                print(indexes)
                raise Exception(
                    "Uncategorized documentation. Please add it in docs/config.yml"
                )

        data = data.format(**CATEGORIES)

        with open(os.path.join(self.staging_dir, "index.rst"), "w") as fh:
            fh.write(data)
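For reference, here is a minimal sketch of the docs/config.yml "categories" structure that _synchronize_docs consumes, and of the toctree formatting applied to it. Category and tree names are hypothetical; PyYAML is assumed to be available, as it already is above.

import yaml

tree_config = yaml.safe_load(
    """
categories:
  build_doc:
    - mach
    - tools/try
"""
)["categories"]


def format_paths(paths):
    # Same formatting as the nested helper above: one "<tree>/index" entry per
    # line, indented to line up inside a reST toctree.
    return "\n   ".join("%s/index" % p for p in paths)


print(format_paths(tree_config["build_doc"]))
# mach/index
#    tools/try/index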
Example #39
    def test_preprocessor(self):
        manifest = self.tmppath('m')
        deps = self.tmppath('m.pp')
        dest = self.tmppath('dest')
        include = self.tmppath('p_incl')

        with open(include, 'wt') as fh:
            fh.write('#define INCL\n')
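        # Backdate the include (and, below, the source) so that anything the copier
        # writes later compares as strictly newer under mtime-based checks.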
        time = os.path.getmtime(include) - 3
        os.utime(include, (time, time))

        with open(self.tmppath('p_source'), 'wt') as fh:
            fh.write('#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n')
            fh.write('#ifdef DEPTEST\nPASS2\n#endif\n')
            fh.write('#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n')
        time = os.path.getmtime(self.tmppath('p_source')) - 3
        os.utime(self.tmppath('p_source'), (time, time))

        # Create and write a manifest with the preprocessed file, then apply it.
        # This should write out our preprocessed file.
        m = InstallManifest()
        m.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#',
                         {'FOO': 'BAR', 'BAZ': 'QUX'})
        m.write(path=manifest)

        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS1\n')

        # Create a second manifest with the preprocessed file, then apply it.
        # Since this manifest does not exist on the disk, there should not be a
        # dependency on it, and the preprocessed file should not be modified.
        m2 = InstallManifest()
        m2.add_preprocess(self.tmppath('p_source'), 'p_dest', deps, '#', {'DEPTEST':True})
        c = FileCopier()
        m2.populate_registry(c)
        result = c.copy(dest)

        self.assertFalse(self.tmppath('dest/p_dest') in result.updated_files)
        self.assertTrue(self.tmppath('dest/p_dest') in result.existing_files)

        # Write out the second manifest, then load it back in from the disk.
        # This should add the dependency on the manifest file, so our
        # preprocessed file should be regenerated with the new defines.
        # We also set the mtime on the destination file back, so it will be
        # older than the manifest file.
        m2.write(path=manifest)
        time = os.path.getmtime(manifest) - 1
        os.utime(self.tmppath('dest/p_dest'), (time, time))
        m2 = InstallManifest(path=manifest)
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS2\n')

        # Set the time on the manifest back, so it won't be picked up as
        # modified in the next test
        time = os.path.getmtime(manifest) - 1
        os.utime(manifest, (time, time))

        # Update the contents of a file included by the source file. This should
        # cause the destination to be regenerated.
        with open(include, 'wt') as fh:
            fh.write('#define INCLTEST\n')

        time = os.path.getmtime(include) - 1
        os.utime(self.tmppath('dest/p_dest'), (time, time))
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath('dest/p_dest'), 'rt') as fh:
            self.assertEqual(fh.read(), 'PASS2\nPASS3\n')
Example #40
    def test_preprocessor_dependencies(self):
        manifest = self.tmppath('m')
        deps = self.tmppath('m.pp')
        dest = self.tmppath('dest')
        source = self.tmppath('p_source')
        destfile = self.tmppath('dest/p_dest')
        include = self.tmppath('p_incl')
        os.mkdir(dest)

        with open(source, 'wt') as fh:
            fh.write('#define SRC\nSOURCE\n')
        time = os.path.getmtime(source) - 3
        os.utime(source, (time, time))

        with open(include, 'wt') as fh:
            fh.write('INCLUDE\n')
        time = os.path.getmtime(source) - 3
        os.utime(include, (time, time))

        # Create and write a manifest with the preprocessed file.
        m = InstallManifest()
        m.add_preprocess(source, 'p_dest', deps, '#', {
            'FOO': 'BAR',
            'BAZ': 'QUX'
        })
        m.write(path=manifest)

        time = os.path.getmtime(source) - 5
        os.utime(manifest, (time, time))

        # Now read the manifest back in, and apply it. This should write out
        # our preprocessed file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\n')

        # Next, modify the source to #INCLUDE another file.
        with open(source, 'wt') as fh:
            fh.write('SOURCE\n#include p_incl\n')
        time = os.path.getmtime(source) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that it also reads the newly included
        # file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n')

        # Set the time on the source file back, so it won't be picked up as
        # modified in the next test.
        time = os.path.getmtime(source) - 1
        os.utime(source, (time, time))

        # Now, modify the include file (but not the original source).
        with open(include, 'wt') as fh:
            fh.write('INCLUDE MODIFIED\n')
        time = os.path.getmtime(include) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that the change to the include file
        # is detected. That should cause the preprocessor to run again.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n')

        # ORing an InstallManifest should copy file dependencies
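        # (Entries merged in via |= keep the on-disk manifest path in extra_depends,
        # as asserted below.)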
        m = InstallManifest()
        m |= InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        e = c._files['p_dest']
        self.assertEqual(e.extra_depends, [manifest])
Example #41
    def _synchronize_docs(self):
        m = InstallManifest()

        with open(os.path.join(MAIN_DOC_PATH, 'config.yml'), 'r') as fh:
            tree_config = yaml.safe_load(fh)['categories']

        m.add_link(self.conf_py_path, 'conf.py')

        for dest, source in sorted(self.trees.items()):
            source_dir = os.path.join(self.topsrcdir, source)
            for root, dirs, files in os.walk(source_dir):
                for f in files:
                    source_path = os.path.join(root, f)
                    rel_source = source_path[len(source_dir) + 1:]

                    m.add_link(source_path, os.path.join(dest, rel_source))

        copier = FileCopier()
        m.populate_registry(copier)
        copier.copy(self.staging_dir)

        with open(self.index_path, 'rb') as fh:
            data = fh.read()

        def is_toplevel(key):
            """Whether the tree is nested under the toplevel index, or is
            nested under another tree's index.
            """
            for k in self.trees:
                if k == key:
                    continue
                if key.startswith(k):
                    return False
            return True

        def format_paths(paths):
            source_doc = ['%s/index' % p for p in paths]
            return '\n   '.join(source_doc)

        toplevel_trees = {
            k: v
            for k, v in self.trees.items() if is_toplevel(k)
        }

        CATEGORIES = {}
        # Generate the category -> formatted index paths mapping for the tree.
        for t in tree_config:
            CATEGORIES[t] = format_paths(tree_config[t])

        indexes = set(['%s/index' % p for p in toplevel_trees.keys()])
        # Format categories like indexes
        cats = '\n'.join(CATEGORIES.values()).split("\n")
        # Remove heading spaces
        cats = [x.strip() for x in cats]
        indexes = tuple(set(indexes) - set(cats))
        if indexes:
            # In case a new doc isn't categorized
            print(indexes)
            raise Exception(
                "Uncategorized documentation. Please add it in tools/docs/config.yml"
            )

        data = data.format(**CATEGORIES)

        with open(os.path.join(self.staging_dir, 'index.rst'), 'wb') as fh:
            fh.write(data)
Example #42
    def test_preprocessor_dependencies(self):
        manifest = self.tmppath('m')
        deps = self.tmppath('m.pp')
        dest = self.tmppath('dest')
        source = self.tmppath('p_source')
        destfile = self.tmppath('dest/p_dest')
        include = self.tmppath('p_incl')
        os.mkdir(dest)

        with open(source, 'wt') as fh:
            fh.write('#define SRC\nSOURCE\n')
        time = os.path.getmtime(source) - 3
        os.utime(source, (time, time))

        with open(include, 'wt') as fh:
            fh.write('INCLUDE\n')
        time = os.path.getmtime(source) - 3
        os.utime(include, (time, time))

        # Create and write a manifest with the preprocessed file.
        m = InstallManifest()
        m.add_preprocess(source, 'p_dest', deps, '#', {'FOO': 'BAR', 'BAZ': 'QUX'})
        m.write(path=manifest)

        time = os.path.getmtime(source) - 5
        os.utime(manifest, (time, time))

        # Now read the manifest back in, and apply it. This should write out
        # our preprocessed file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\n')

        # Next, modify the source to #INCLUDE another file.
        with open(source, 'wt') as fh:
            fh.write('SOURCE\n#include p_incl\n')
        time = os.path.getmtime(source) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that it also reads the newly included
        # file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n')

        # Set the time on the source file back, so it won't be picked up as
        # modified in the next test.
        time = os.path.getmtime(source) - 1
        os.utime(source, (time, time))

        # Now, modify the include file (but not the original source).
        with open(include, 'wt') as fh:
            fh.write('INCLUDE MODIFIED\n')
        time = os.path.getmtime(include) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that the change to the include file
        # is detected. That should cause the preprocessor to run again.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, 'rt') as fh:
            self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n')

        # ORing an InstallManifest should copy file dependencies
        m = InstallManifest()
        m |= InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        e = c._files['p_dest']
        self.assertEqual(e.extra_depends, [manifest])