Example No. 1
    def test_optional_exists_creates_unneeded_directory(self):
        """Demonstrate that a directory not strictly required, but specified
        as the path to an optional file, will be unnecessarily created.

        This behaviour is wrong; fixing it is tracked by Bug 972432;
        and this test exists to guard against unexpected changes in
        behaviour.
        """

        dest = self.tmppath("dest")

        copier = FileCopier()
        copier.add("foo/bar", ExistingFile(required=False))

        result = copier.copy(dest)

        st = os.lstat(self.tmppath("dest/foo"))
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        # What's worse, we have no record that dest was created.
        self.assertEqual(len(result.updated_files), 0)

        # But we do have an erroneous record of an optional file
        # existing when it does not.
        self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files)
Example No. 2
    def output_changes(self, verbose=True):
        '''
        Return an iterator of `FasterBuildChange` instances as outputs
        from the faster build system are updated.
        '''
        for change in self.input_changes(verbose=verbose):
            now = datetime.datetime.utcnow()

            for unrecognized in sorted(change.unrecognized):
                print_line('watch', '! {}'.format(unrecognized), now=now)

            all_outputs = set()
            for input in sorted(change.input_to_outputs):
                outputs = change.input_to_outputs[input]

                print_line('watch', '< {}'.format(input), now=now)
                for output in sorted(outputs):
                    print_line('watch', '> {}'.format(output), now=now)
                all_outputs |= outputs

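            # Copy just the outputs affected by this change, rather than
            # everything registered in self.file_copier.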
            if all_outputs:
                partial_copier = FileCopier()
                for output in all_outputs:
                    partial_copier.add(output, self.file_copier[output])

                self.incremental_copy(partial_copier, force=True, verbose=verbose)
                yield change
Example No. 3
    def test_symlink_directory_replaced(self):
        """Directory symlinks in destination are replaced if they need to be
        real directories."""
        if not self.symlink_supported:
            return

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))

        os.makedirs(self.tmppath('dest/foo'))
        dummy = self.tmppath('dummy')
        os.mkdir(dummy)
        link = self.tmppath('dest/foo/bar')
        os.symlink(dummy, link)

        result = copier.copy(dest)

        st = os.lstat(link)
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        self.assertEqual(self.all_files(dest), set(copier.paths()))

        self.assertEqual(result.removed_directories, set())
        self.assertEqual(len(result.updated_files), 1)
Example No. 5
    def test_symlink_directory(self):
        """Directory symlinks in destination are deleted."""
        if not self.symlink_supported:
            return

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))

        os.makedirs(self.tmppath('dest/foo'))
        dummy = self.tmppath('dummy')
        os.mkdir(dummy)
        link = self.tmppath('dest/foo/bar')
        os.symlink(dummy, link)

        result = copier.copy(dest)

        st = os.lstat(link)
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        self.assertEqual(self.all_files(dest), set(copier.paths()))

        self.assertEqual(result.removed_directories, set())
        self.assertEqual(len(result.updated_files), 1)
Example No. 6
def main():
    parser = argparse.ArgumentParser(
        description="Merge two directories, creating Universal binaries for "
        "executables and libraries they contain.")
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")

    options = parser.parse_args()

    buildconfig.substs["OS_ARCH"] = "Darwin"
    buildconfig.substs["LIPO"] = os.environ.get("LIPO")

    dir1_finder = FileFinder(options.dir1,
                             find_executables=True,
                             find_dotfiles=True)
    dir2_finder = FileFinder(options.dir2,
                             find_executables=True,
                             find_dotfiles=True)
    finder = UnifiedTestFinder(dir1_finder, dir2_finder)

    copier = FileCopier()
    with errors.accumulate():
        for p, f in finder:
            copier.add(p, f)

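    # Write the unified result back into dir1, copying unconditionally
    # (skip_if_older=False).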
    copier.copy(options.dir1, skip_if_older=False)
Example No. 7
    def test_symlink_directory_replaced(self):
        """Directory symlinks in destination are replaced if they need to be
        real directories."""
        if not self.symlink_supported:
            return

        dest = self.tmppath("dest")

        copier = FileCopier()
        copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))

        os.makedirs(self.tmppath("dest/foo"))
        dummy = self.tmppath("dummy")
        os.mkdir(dummy)
        link = self.tmppath("dest/foo/bar")
        os.symlink(dummy, link)

        result = copier.copy(dest)

        st = os.lstat(link)
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        self.assertEqual(self.all_files(dest), set(copier.paths()))

        self.assertEqual(result.removed_directories, set())
        self.assertEqual(len(result.updated_files), 1)
Example No. 8
    def test_optional_exists_creates_unneeded_directory(self):
        """Demonstrate that a directory not strictly required, but specified
        as the path to an optional file, will be unnecessarily created.

        This behaviour is wrong; fixing it is tracked by Bug 972432;
        and this test exists to guard against unexpected changes in
        behaviour.
        """

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar', ExistingFile(required=False))

        result = copier.copy(dest)

        st = os.lstat(self.tmppath('dest/foo'))
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        # What's worse, we have no record that dest was created.
        self.assertEqual(len(result.updated_files), 0)

        # But we do have an erroneous record of an optional file
        # existing when it does not.
        self.assertIn(self.tmppath('dest/foo/bar'), result.existing_files)
Example No. 9
def strip(dir):
    copier = FileCopier()
    # The FileFinder will give us ExecutableFile instances for files
    # that can be stripped, and copying ExecutableFiles defaults to
    # stripping them when buildconfig.substs['PKG_STRIP'] is set.
    for p, f in FileFinder(dir, find_executables=True):
        copier.add(p, f)
    copier.copy(dir)
Example No. 10
def strip(dir):
    copier = FileCopier()
    # The FileFinder will give us ExecutableFile instances for files
    # that can be stripped, and copying ExecutableFiles defaults to
    # stripping them unless buildconfig.substs['PKG_SKIP_STRIP'] is set.
    for p, f in FileFinder(dir):
        copier.add(p, f)
    copier.copy(dir)
Example No. 11
    def test_no_remove(self):
        copier = FileCopier()
        copier.add('foo', GeneratedFile('foo'))

        with open(self.tmppath('bar'), 'a'):
            pass

        os.mkdir(self.tmppath('emptydir'))

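        # remove_unaccounted=False leaves the pre-existing 'bar' in place;
        # empty directories are still pruned (see removed_directories below).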
        result = copier.copy(self.tmpdir, remove_unaccounted=False)

        self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar']))
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories,
                         set([self.tmppath('emptydir')]))
Example No. 12
    def test_no_remove(self):
        copier = FileCopier()
        copier.add('foo', GeneratedFile('foo'))

        with open(self.tmppath('bar'), 'a'):
            pass

        os.mkdir(self.tmppath('emptydir'))

        result = copier.copy(self.tmpdir, remove_unaccounted=False)

        self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar']))
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories,
            set([self.tmppath('emptydir')]))
Example No. 13
def main():
    parser = argparse.ArgumentParser(
        description="Merge two crashreporter symbols directories."
    )
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")

    options = parser.parse_args()

    dir1_finder = FileFinder(options.dir1)
    dir2_finder = FileFinder(options.dir2)
    finder = UnifiedSymbolsFinder(dir1_finder, dir2_finder)

    copier = FileCopier()
    with errors.accumulate():
        for p, f in finder:
            copier.add(p, f)

    copier.copy(options.dir1, skip_if_older=False)
Example No. 14
    def test_no_remove_empty_directories(self):
        copier = FileCopier()
        copier.add("foo", GeneratedFile("foo"))

        with open(self.tmppath("bar"), "a"):
            pass

        os.mkdir(self.tmppath("emptydir"))
        d = self.tmppath("populateddir")
        os.mkdir(d)

        with open(self.tmppath("populateddir/foo"), "a"):
            pass

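        # With both removal options disabled, the unaccounted 'bar' and the
        # empty directory both survive the copy.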
        result = copier.copy(self.tmpdir, remove_unaccounted=False, remove_empty_directories=False)

        self.assertEqual(self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"]))
        self.assertEqual(self.all_dirs(self.tmpdir), set(["emptydir", "populateddir"]))
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories, set())
Example No. 15
    def test_permissions(self):
        """Ensure files without write permission can be deleted."""
        with open(self.tmppath("dummy"), "a"):
            pass

        p = self.tmppath("no_perms")
        with open(p, "a"):
            pass

        # Make file and directory unwritable. Reminder: making a directory
        # unwritable prevents modifications (including deletes) from the list
        # of files in that directory.
        os.chmod(p, 0o400)
        os.chmod(self.tmpdir, 0o400)

        copier = FileCopier()
        copier.add("dummy", GeneratedFile(b"content"))
        result = copier.copy(self.tmpdir)
        self.assertEqual(result.removed_files_count, 1)
        self.assertFalse(os.path.exists(p))
Example No. 16
    def test_permissions(self):
        """Ensure files without write permission can be deleted."""
        with open(self.tmppath('dummy'), 'a'):
            pass

        p = self.tmppath('no_perms')
        with open(p, 'a'):
            pass

        # Make file and directory unwritable. Reminder: making a directory
        # unwritable prevents modifications (including deletes) from the list
        # of files in that directory.
        os.chmod(p, 0o400)
        os.chmod(self.tmpdir, 0o400)

        copier = FileCopier()
        copier.add('dummy', GeneratedFile('content'))
        result = copier.copy(self.tmpdir)
        self.assertEqual(result.removed_files_count, 1)
        self.assertFalse(os.path.exists(p))
Example No. 17
    def test_no_remove(self):
        copier = FileCopier()
        copier.add("foo", GeneratedFile(b"foo"))

        with open(self.tmppath("bar"), "a"):
            pass

        os.mkdir(self.tmppath("emptydir"))
        d = self.tmppath("populateddir")
        os.mkdir(d)

        with open(self.tmppath("populateddir/foo"), "a"):
            pass

        result = copier.copy(self.tmpdir, remove_unaccounted=False)

        self.assertEqual(self.all_files(self.tmpdir),
                         set(["foo", "bar", "populateddir/foo"]))
        self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"]))
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories,
                         set([self.tmppath("emptydir")]))
Example No. 18
    def test_remove_unaccounted_file_registry(self):
        """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""

        dest = self.tmppath("dest")

        copier = FileCopier()
        copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
        copier.add("foo/bar/qux", GeneratedFile(b"foobarqux"))
        copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
        copier.add("foo/toto/tata", GeneratedFile(b"footototata"))

        os.makedirs(os.path.join(dest, "bar"))
        with open(os.path.join(dest, "bar", "bar"), "w") as fh:
            fh.write("barbar")
        os.makedirs(os.path.join(dest, "foo", "toto"))
        with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
            fh.write("foototototo")

        result = copier.copy(dest, remove_unaccounted=False)

        self.assertEqual(self.all_files(dest),
                         set(copier.paths()) | {"foo/toto/toto", "bar/bar"})
        self.assertEqual(self.all_dirs(dest),
                         {"foo/bar", "foo/hoge", "foo/toto", "bar"})

        copier2 = FileCopier()
        copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))

        # We expect only files copied from the first copier to be removed,
        # not the extra file that was there beforehand.
        result = copier2.copy(dest, remove_unaccounted=copier)

        self.assertEqual(self.all_files(dest),
                         set(copier2.paths()) | {"foo/toto/toto", "bar/bar"})
        self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
        self.assertEqual(result.updated_files,
                         {self.tmppath("dest/foo/hoge/fuga")})
        self.assertEqual(result.existing_files, set())
        self.assertEqual(
            result.removed_files,
            {
                self.tmppath(p)
                for p in ("dest/foo/bar/baz", "dest/foo/bar/qux",
                          "dest/foo/toto/tata")
            },
        )
        self.assertEqual(result.removed_directories,
                         {self.tmppath("dest/foo/bar")})
Example No. 19
    def test_remove_unaccounted_file_registry(self):
        """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
        copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
        copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
        copier.add('foo/toto/tata', GeneratedFile('footototata'))

        os.makedirs(os.path.join(dest, 'bar'))
        with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
            fh.write('barbar')
        os.makedirs(os.path.join(dest, 'foo', 'toto'))
        with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
            fh.write('foototototo')

        result = copier.copy(dest, remove_unaccounted=False)

        self.assertEqual(self.all_files(dest),
                         set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
        self.assertEqual(self.all_dirs(dest),
                         {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})

        copier2 = FileCopier()
        copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))

        # We expect only files copied from the first copier to be removed,
        # not the extra file that was there beforehand.
        result = copier2.copy(dest, remove_unaccounted=copier)

        self.assertEqual(self.all_files(dest),
                         set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
        self.assertEqual(self.all_dirs(dest), {'foo/hoge', 'foo/toto', 'bar'})
        self.assertEqual(result.updated_files,
                         {self.tmppath('dest/foo/hoge/fuga')})
        self.assertEqual(result.existing_files, set())
        self.assertEqual(
            result.removed_files, {
                self.tmppath(p)
                for p in ('dest/foo/bar/baz', 'dest/foo/bar/qux',
                          'dest/foo/toto/tata')
            })
        self.assertEqual(result.removed_directories,
                         {self.tmppath('dest/foo/bar')})
Example No. 20
    def test_remove_unaccounted_file_registry(self):
        """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
        copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
        copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
        copier.add('foo/toto/tata', GeneratedFile('footototata'))

        os.makedirs(os.path.join(dest, 'bar'))
        with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
            fh.write('barbar')
        os.makedirs(os.path.join(dest, 'foo', 'toto'))
        with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
            fh.write('foototototo')

        result = copier.copy(dest, remove_unaccounted=False)

        self.assertEqual(self.all_files(dest),
                         set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
        self.assertEqual(self.all_dirs(dest),
                         {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})

        copier2 = FileCopier()
        copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))

        # We expect only files copied from the first copier to be removed,
        # not the extra file that was there beforehand.
        result = copier2.copy(dest, remove_unaccounted=copier)

        self.assertEqual(self.all_files(dest),
                         set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
        self.assertEqual(self.all_dirs(dest),
                         {'foo/hoge', 'foo/toto', 'bar'})
        self.assertEqual(result.updated_files,
                         {self.tmppath('dest/foo/hoge/fuga')})
        self.assertEqual(result.existing_files, set())
        self.assertEqual(result.removed_files, {self.tmppath(p) for p in
            ('dest/foo/bar/baz', 'dest/foo/bar/qux', 'dest/foo/toto/tata')})
        self.assertEqual(result.removed_directories,
                         {self.tmppath('dest/foo/bar')})
Example No. 21
    def test_remove_unaccounted_file_registry(self):
        """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""

        dest = self.tmppath("dest")

        copier = FileCopier()
        copier.add("foo/bar/baz", GeneratedFile("foobarbaz"))
        copier.add("foo/bar/qux", GeneratedFile("foobarqux"))
        copier.add("foo/hoge/fuga", GeneratedFile("foohogefuga"))
        copier.add("foo/toto/tata", GeneratedFile("footototata"))

        os.makedirs(os.path.join(dest, "bar"))
        with open(os.path.join(dest, "bar", "bar"), "w") as fh:
            fh.write("barbar")
        os.makedirs(os.path.join(dest, "foo", "toto"))
        with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
            fh.write("foototototo")

        result = copier.copy(dest, remove_unaccounted=False)

        self.assertEqual(self.all_files(dest), set(copier.paths()) | {"foo/toto/toto", "bar/bar"})
        self.assertEqual(self.all_dirs(dest), {"foo/bar", "foo/hoge", "foo/toto", "bar"})

        copier2 = FileCopier()
        copier2.add("foo/hoge/fuga", GeneratedFile("foohogefuga"))

        # We expect only files copied from the first copier to be removed,
        # not the extra file that was there beforehand.
        result = copier2.copy(dest, remove_unaccounted=copier)

        self.assertEqual(self.all_files(dest), set(copier2.paths()) | {"foo/toto/toto", "bar/bar"})
        self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
        self.assertEqual(result.updated_files, {self.tmppath("dest/foo/hoge/fuga")})
        self.assertEqual(result.existing_files, set())
        self.assertEqual(
            result.removed_files,
            {self.tmppath(p) for p in ("dest/foo/bar/baz", "dest/foo/bar/qux", "dest/foo/toto/tata")},
        )
        self.assertEqual(result.removed_directories, {self.tmppath("dest/foo/bar")})
Example No. 22
    def test_remove_unaccounted_directory_symlinks(self):
        """Directory symlinks in destination that are not in the way are
        deleted according to remove_unaccounted and
        remove_all_directory_symlinks.
        """
        if not self.symlink_supported:
            return

        dest = self.tmppath("dest")

        copier = FileCopier()
        copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))

        os.makedirs(self.tmppath("dest/foo"))
        dummy = self.tmppath("dummy")
        os.mkdir(dummy)

        os.mkdir(self.tmppath("dest/zot"))
        link = self.tmppath("dest/zot/zap")
        os.symlink(dummy, link)

        # If not remove_unaccounted but remove_empty_directories, then
        # the symlinked directory remains (as does its containing
        # directory).
        result = copier.copy(
            dest,
            remove_unaccounted=False,
            remove_empty_directories=True,
            remove_all_directory_symlinks=False,
        )

        st = os.lstat(link)
        self.assertTrue(stat.S_ISLNK(st.st_mode))
        self.assertFalse(stat.S_ISDIR(st.st_mode))

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))

        self.assertEqual(result.removed_directories, set())
        self.assertEqual(len(result.updated_files), 1)

        # If remove_unaccounted but not remove_empty_directories, then
        # only the symlinked directory is removed.
        result = copier.copy(
            dest,
            remove_unaccounted=True,
            remove_empty_directories=False,
            remove_all_directory_symlinks=False,
        )

        st = os.lstat(self.tmppath("dest/zot"))
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        self.assertEqual(result.removed_files, set([link]))
        self.assertEqual(result.removed_directories, set())

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(["foo/bar", "zot"]))

        # If remove_unaccounted and remove_empty_directories, then
        # both the symlink and its containing directory are removed.
        link = self.tmppath("dest/zot/zap")
        os.symlink(dummy, link)

        result = copier.copy(
            dest,
            remove_unaccounted=True,
            remove_empty_directories=True,
            remove_all_directory_symlinks=False,
        )

        self.assertEqual(result.removed_files, set([link]))
        self.assertEqual(result.removed_directories,
                         set([self.tmppath("dest/zot")]))

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
Example No. 23
    def test_file_copier(self):
        copier = FileCopier()
        copier.add("foo/bar", GeneratedFile("foobar"))
        copier.add("foo/qux", GeneratedFile("fooqux"))
        copier.add("foo/deep/nested/directory/file", GeneratedFile("fooz"))
        copier.add("bar", GeneratedFile("bar"))
        copier.add("qux/foo", GeneratedFile("quxfoo"))
        copier.add("qux/bar", GeneratedFile(""))

        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(["foo/deep/nested/directory", "qux"]))

        self.assertEqual(result.updated_files, set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
        self.assertEqual(result.existing_files, set())
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories, set())

        copier.remove("foo")
        copier.add("test", GeneratedFile("test"))
        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
        self.assertEqual(
            result.removed_files, set(self.tmppath(p) for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file"))
        )
Example No. 24
    def test_file_copier(self):
        copier = FileCopier()
        copier.add("foo/bar", GeneratedFile(b"foobar"))
        copier.add("foo/qux", GeneratedFile(b"fooqux"))
        copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
        copier.add("bar", GeneratedFile(b"bar"))
        copier.add("qux/foo", GeneratedFile(b"quxfoo"))
        copier.add("qux/bar", GeneratedFile(b""))

        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir),
                         set(["foo/deep/nested/directory", "qux"]))

        self.assertEqual(
            result.updated_files,
            set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
        )
        self.assertEqual(result.existing_files, set())
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories, set())

        copier.remove("foo")
        copier.add("test", GeneratedFile(b"test"))
        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
        self.assertEqual(
            result.removed_files,
            set(
                self.tmppath(p) for p in ("foo/bar", "foo/qux",
                                          "foo/deep/nested/directory/file")),
        )
Example No. 25
def main():
    parser = ArgumentParser()
    parser.add_argument('-D',
                        dest='defines',
                        action='append',
                        metavar="VAR[=VAL]",
                        help='Define a variable')
    parser.add_argument('--format',
                        default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals',
                        default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors',
                        action='store_true',
                        default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify',
                        action='store_true',
                        default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js',
                        action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog',
                        default='',
                        help='File containing jar ' + 'access logs')
    parser.add_argument('--optimizejars',
                        action='store_true',
                        default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify',
                        default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest',
                        default=None,
                        nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource',
                        nargs='+',
                        metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    binpath = ''
    if 'BINPATH' in defines:
        binpath = SimpleManifestSink.normalize_path(defines['BINPATH'])
    while binpath.startswith('/'):
        binpath = binpath[1:]

    if args.unify:

        def is_native(path):
            path = os.path.abspath(path)
            return platform.machine() in mozpack.path.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native,
                                         reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify), **finder_args)
        else:
            finder = FileFinder(args.source, **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            lines = [l.lstrip() for l in open(args.removals).readlines()]
            removals_in = StringIO(''.join(lines))
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpack.path.join(binpath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        for lib in SIGN_LIBS:
            libbase = mozpack.path.join(binpath, '%s%s') \
                % (buildconfig.substs['DLL_PREFIX'], lib)
            libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
            if copier.contains(libname):
                copier.add(
                    libbase + '.chk',
                    LibSignFile(os.path.join(args.destination, libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache
    if isinstance(formatter, OmniJarFormatter) and launcher.can_launch() \
      and buildconfig.substs['MOZ_DISABLE_STARTUPCACHE'] != '1':
        if buildconfig.substs['LIBXUL_SDK']:
            gre_path = mozpack.path.join(buildconfig.substs['LIBXUL_DIST'],
                                         'bin')
        else:
            gre_path = None
        for base in sorted([[
                p for p in [mozpack.path.join('bin', b), b]
                if os.path.exists(os.path.join(args.source, p))
        ][0] for b in sink.packager.get_bases()]):
            if not gre_path:
                gre_path = base
            base_path = sink.normalize_path(base)
            if base_path in formatter.omnijars:
                precompile_cache(formatter.omnijars[base_path], args.source,
                                 gre_path, base)

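    # Copy everything into the destination and generate the 'precomplete'
    # file alongside the binaries.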
    copier.copy(args.destination)
    generate_precomplete(
        os.path.normpath(os.path.join(args.destination, binpath)))
Example No. 26
    def test_file_copier(self):
        copier = FileCopier()
        copier.add('foo/bar', GeneratedFile('foobar'))
        copier.add('foo/qux', GeneratedFile('fooqux'))
        copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
        copier.add('bar', GeneratedFile('bar'))
        copier.add('qux/foo', GeneratedFile('quxfoo'))
        copier.add('qux/bar', GeneratedFile(''))

        copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir),
                         set(['foo/deep/nested/directory', 'qux']))

        copier.remove('foo')
        copier.add('test', GeneratedFile('test'))
        copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
Example No. 27
    def test_file_copier(self):
        copier = FileCopier()
        copier.add('foo/bar', GeneratedFile('foobar'))
        copier.add('foo/qux', GeneratedFile('fooqux'))
        copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
        copier.add('bar', GeneratedFile('bar'))
        copier.add('qux/foo', GeneratedFile('quxfoo'))
        copier.add('qux/bar', GeneratedFile(''))

        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir),
                         set(['foo/deep/nested/directory', 'qux']))

        self.assertEqual(
            result.updated_files,
            set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
        self.assertEqual(result.existing_files, set())
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories, set())

        copier.remove('foo')
        copier.add('test', GeneratedFile('test'))
        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
        self.assertEqual(
            result.removed_files,
            set(
                self.tmppath(p) for p in ('foo/bar', 'foo/qux',
                                          'foo/deep/nested/directory/file')))
Example No. 28
    def test_file_copier(self):
        copier = FileCopier()
        copier.add('foo/bar', GeneratedFile('foobar'))
        copier.add('foo/qux', GeneratedFile('fooqux'))
        copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
        copier.add('bar', GeneratedFile('bar'))
        copier.add('qux/foo', GeneratedFile('quxfoo'))
        copier.add('qux/bar', GeneratedFile(''))

        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir),
                         set(['foo/deep/nested/directory', 'qux']))

        self.assertEqual(result.updated_files, set(self.tmppath(p) for p in
            self.all_files(self.tmpdir)))
        self.assertEqual(result.existing_files, set())
        self.assertEqual(result.removed_files, set())
        self.assertEqual(result.removed_directories, set())

        copier.remove('foo')
        copier.add('test', GeneratedFile('test'))
        result = copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
        self.assertEqual(result.removed_files, set(self.tmppath(p) for p in
            ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file')))
Example No. 29
def fat_aar(distdir,
            aars_paths,
            no_process=False,
            no_compatibility_check=False):
    if no_process:
        print('Not processing architecture-specific artifact Maven AARs.')
        return 0

    # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
    diffs = defaultdict(lambda: defaultdict(list))
    missing_arch_prefs = set()
    # Collect multi-architecture inputs to the fat AAR.
    copier = FileCopier()

    for arch, aar_path in aars_paths.items():
        # Map old non-architecture-specific path to new architecture-specific path.
        old_rewrite_map = {
            'greprefs.js':
            '{}/greprefs.js'.format(arch),
            'defaults/pref/geckoview-prefs.js':
            'defaults/pref/{}/geckoview-prefs.js'.format(arch),
        }

        # Architecture-specific preferences files.
        arch_prefs = set(old_rewrite_map.values())
        missing_arch_prefs |= set(arch_prefs)

        jar_finder = JarFinder(aar_path, JarReader(aar_path))
        for path, fileobj in UnpackFinder(jar_finder):
            # Native libraries go straight through.
            if mozpath.match(path, 'jni/**'):
                copier.add(path, fileobj)

            elif path in arch_prefs:
                copier.add(path, fileobj)

            elif path in ('classes.jar', 'annotations.zip'):
                # annotations.zip differs due to timestamps, but the contents should not.

                # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
                # and it's not worth working around, so we use Python's zip functionality
                # instead.
                z = ZipFile(BytesIO(fileobj.open().read()))
                for r in z.namelist():
                    fingerprint = sha1(z.open(r).read()).hexdigest()
                    diffs['{}!/{}'.format(path, r)][fingerprint].append(arch)

            else:
                fingerprint = sha1(fileobj.open().read()).hexdigest()
                # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
                # since in practice they will never overlap.
                diffs[path][fingerprint].append(arch)

            missing_arch_prefs.discard(path)

    # Some differences are allowed across the architecture-specific AARs.  We could allow-list
    # the actual content, but it's not necessary right now.
    allow_pattern_list = {
        'AndroidManifest.xml',  # Min SDK version is different for 32- and 64-bit builds.
        'classes.jar!/org/mozilla/gecko/util/HardwareUtils.class',  # Min SDK as well.
        'classes.jar!/org/mozilla/geckoview/BuildConfig.class',
        # Each input captures its CPU architecture.
        'chrome/toolkit/content/global/buildconfig.html',
        # Bug 1556162: localized resources are not deterministic across
        # per-architecture builds triggered from the same push.
        '**/*.ftl',
        '**/*.dtd',
        '**/*.properties',
    }

    not_allowed = OrderedDict()

    def format_diffs(ds):
        # Like '  armeabi-v7a, arm64-v8a -> XXX\n  x86, x86_64 -> YYY'.
        return '\n'.join(
            sorted('  {archs} -> {fingerprint}'.format(
                archs=', '.join(sorted(archs)), fingerprint=fingerprint)
                   for fingerprint, archs in ds.iteritems()))

    for p, ds in sorted(diffs.iteritems()):
        if len(ds) <= 1:
            # Only one hash across all inputs: roll on.
            continue

        if any(mozpath.match(p, pat) for pat in allow_pattern_list):
            print(
                'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
                .format(path=p, ds_repr=format_diffs(ds)))
            continue

        not_allowed[p] = ds

    for p, ds in not_allowed.iteritems():
        print(
            'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
            .format(path=p, ds_repr=format_diffs(ds)))

    for missing in sorted(missing_arch_prefs):
        print(
            'Disallowed: Inputs missing expected architecture-specific input: {missing}'
            .format(missing=missing))

    if not no_compatibility_check and (missing_arch_prefs or not_allowed):
        return 1

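    # All architectures agreed (or checking was skipped); write the merged
    # fat AAR contents out to <distdir>/output.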
    output_dir = mozpath.join(distdir, 'output')
    copier.copy(output_dir)

    return 0
Example No. 30
def main():
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    binpath = ''
    if 'BINPATH' in defines:
        binpath = SimpleManifestSink.normalize_path(defines['BINPATH'])
    while binpath.startswith('/'):
        binpath = binpath[1:]

    if args.unify:
        def is_native(path):
            path = os.path.abspath(path)
            return platform.machine() in mozpack.path.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify),
                                        minify=args.minify)
        else:
            finder = FileFinder(args.source, minify=args.minify)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            lines = [l.lstrip() for l in open(args.removals).readlines()]
            removals_in = StringIO(''.join(lines))
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpack.path.join(binpath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        for lib in SIGN_LIBS:
            libbase = mozpack.path.join(binpath, '%s%s') \
                % (buildconfig.substs['DLL_PREFIX'], lib)
            libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
            if copier.contains(libname):
                copier.add(libbase + '.chk',
                           LibSignFile(os.path.join(args.destination,
                                                    libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache on Windows and Linux only
    # (this currently causes build failure on BSD, so skip on that platform)
    if sys.platform == 'win32' or sys.platform.startswith('linux'):
      if isinstance(formatter, OmniJarFormatter) and launcher.can_launch():
          if buildconfig.substs['LIBXUL_SDK']:
              gre_path = mozpack.path.join(buildconfig.substs['LIBXUL_DIST'],
                                           'bin')
          else:
              gre_path = None
          for base in sorted([[p for p in [mozpack.path.join('bin', b), b]
                              if os.path.exists(os.path.join(args.source, p))][0]
                             for b in sink.packager.get_bases()]):
              if not gre_path:
                  gre_path = base
              base_path = sink.normalize_path(base)
              if base_path in formatter.omnijars:
                  precompile_cache(formatter.omnijars[base_path],
                                   args.source, gre_path, base)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       binpath)))
Example No. 32
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=[],
    locale_allowlist=set(),
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly?  Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )
    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)
    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds.
    # The nested langpack XPI files can't be read by `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to give us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that we
    # can't easily evaluate completely here.  Instead, we take the last value
    # from `configure.sh`.
    lines = [
        line
        for line in open(mozpath.join(branding, "configure.sh")).readlines()
        if "MOZ_IGECKOBACKCHANNEL_IID" in line
    ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding.  Differentiate Beta a little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
    # Package Root'.  This is `BrandFullName` in the installer, and we want to
    # be close but to not match.  By not matching, we hope to prevent confusion
    # and/or errors between regularly installed builds and App Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here.  I.e.,
    # "__{publisherID}".  My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
    # missing field, but it's not necessary, since this is just an output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs. This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)

            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts fetched from dependent
        # tasks.  In particular, langpacks will be named like `target.langpack.xpi`.  The fetch
        # tasks do allow us to put them in a per-locale directory, so that the entire set can be
        # fetched.  Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE".  This is how langpacks
                # are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/[email protected]".  This is what AMO
                # serves and how flatpak builds name langpacks, but not how snap builds name
                # langpacks.  I can't explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/[email protected]" into "LOCALE".  This is
                # how langpacks are presented from an unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales: see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, which is
    # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales.  But we
    # only advertise support in the App manifest for the intersection of that set and the set of
    # supported locales.
    #
    # We distribute all langpacks to avoid the following issue.  Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package.  MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update.  But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f'    <Resource Language="{locale}" />' for locale in sorted(locales)
    )

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
        # Beta Package Root'.  See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
            log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
    # supports only hyphen style.  `makeappx.exe` can overwrite the output and
    # give more verbose feedback, so we prefer invoking it with those flags when
    # we detect it.  This will also accommodate `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
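
As a quick illustration of the naming convention assembled above, the identity and package output name compose roughly as in the following standalone sketch; the `_MSIX_ARCH` values shown here are assumptions standing in for the module's real table, and the vendor, display name, and version are example inputs only.

_MSIX_ARCH = {"x86": "x86", "x86_64": "x64", "aarch64": "arm64"}  # assumed mapping

vendor, displayname, version = "Mozilla", "Mozilla Firefox Nightly", "89.0.0.0"
identity = "{}.{}".format(vendor, displayname).replace(" ", "")
package_output_name = "{identity}_{version}_{arch}".format(
    identity=identity, version=version, arch=_MSIX_ARCH["x86_64"]
)
print(package_output_name)  # Mozilla.MozillaFirefoxNightly_89.0.0.0_x64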
Exemplo n.º 33
def main():
    parser = ArgumentParser()
    parser.add_argument(
        "-D",
        dest="defines",
        action="append",
        metavar="VAR[=VAL]",
        help="Define a variable",
    )
    parser.add_argument(
        "--format",
        default="omni",
        help="Choose the chrome format for packaging " +
        "(omni, jar or flat ; default: %(default)s)",
    )
    parser.add_argument("--removals",
                        default=None,
                        help="removed-files source file")
    parser.add_argument(
        "--ignore-errors",
        action="store_true",
        default=False,
        help="Transform errors into warnings.",
    )
    parser.add_argument(
        "--ignore-broken-symlinks",
        action="store_true",
        default=False,
        help="Do not fail when processing broken symlinks.",
    )
    parser.add_argument(
        "--minify",
        action="store_true",
        default=False,
        help="Make some files more compact while packaging",
    )
    parser.add_argument(
        "--minify-js",
        action="store_true",
        help="Minify JavaScript files while packaging.",
    )
    parser.add_argument(
        "--js-binary",
        help="Path to js binary. This is used to verify "
        "minified JavaScript. If this is not defined, "
        "minification verification will not be performed.",
    )
    parser.add_argument("--jarlog",
                        default="",
                        help="File containing jar " + "access logs")
    parser.add_argument(
        "--compress",
        choices=("none", "deflate"),
        default="deflate",
        help="Use given jar compression (default: deflate)",
    )
    parser.add_argument("manifest",
                        default=None,
                        nargs="?",
                        help="Manifest file name")
    parser.add_argument("source", help="Source directory")
    parser.add_argument("destination", help="Destination directory")
    parser.add_argument(
        "--non-resource",
        nargs="+",
        metavar="PATTERN",
        default=[],
        help="Extra files not to be considered as resources",
    )
    args = parser.parse_args()

    defines = dict(buildconfig.defines["ALLDEFINES"])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    compress = {
        "none": False,
        "deflate": True,
    }[args.compress]

    copier = FileCopier()
    if args.format == "flat":
        formatter = FlatFormatter(copier)
    elif args.format == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == "omni":
        formatter = OmniJarFormatter(
            copier,
            buildconfig.substs["OMNIJAR_NAME"],
            compress=compress,
            non_resources=args.non_resource,
        )
    else:
        errors.fatal("Unknown format: %s" % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines["MOZ_OMNIJAR"] = 1
    elif "MOZ_OMNIJAR" in defines:
        del defines["MOZ_OMNIJAR"]

    respath = ""
    if "RESPATH" in defines:
        respath = SimpleManifestSink.normalize_path(defines["RESPATH"])
    while respath.startswith("/"):
        respath = respath[1:]

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args["minify_js_verify_command"] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             "js-compare-ast.js"),
            ]
        finder = PackagerFileFinder(args.source,
                                    find_executables=True,
                                    **finder_args)
        if "NO_PKG_FILES" in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(""), "bin/*")
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, "removed-files"), removals)

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get("MOZ_COPY_PDBS"):
        # We want to mutate the copier while we're iterating through it, so copy
        # the items to a list first.
        copier_items = [(p, f) for p, f in copier]
        for p, f in copier_items:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + ".pdb"
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception("Cannot find jar log: %s" % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [
                mozpath.join(base, buildconfig.substs["OMNIJAR_NAME"])
                for base in sink.packager.get_bases(addons=False)
            ]

        from mozpack.mozjar import JarLog

        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                raise Exception("No jar log data for %s" % p)

    copier.copy(args.destination)
    generate_precomplete(
        os.path.normpath(os.path.join(args.destination, respath)))
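
The `-D VAR[=VAL]` handling above relies on a `split_define` helper that is not shown here; a minimal sketch of the assumed behavior (not the actual mozbuild implementation) might look like:

def split_define_sketch(define):
    """Split "VAR[=VAL]" into (name, value); a bare "VAR" defines it as 1."""
    if "=" in define:
        name, _, value = define.partition("=")
        return (name, value)
    return (define, 1)

assert split_define_sketch("MOZ_OMNIJAR=1") == ("MOZ_OMNIJAR", "1")
assert split_define_sketch("DEBUG") == ("DEBUG", 1)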
Exemplo n.º 34
    def test_remove_unaccounted_directory_symlinks(self):
        """Directory symlinks in destination that are not in the way are
        deleted according to remove_unaccounted and
        remove_all_directory_symlinks.
        """
        if not self.symlink_supported:
            return

        dest = self.tmppath('dest')

        copier = FileCopier()
        copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))

        os.makedirs(self.tmppath('dest/foo'))
        dummy = self.tmppath('dummy')
        os.mkdir(dummy)

        os.mkdir(self.tmppath('dest/zot'))
        link = self.tmppath('dest/zot/zap')
        os.symlink(dummy, link)

        # If not remove_unaccounted but remove_empty_directories, then
        # the symlinked directory remains (as does its containing
        # directory).
        result = copier.copy(dest, remove_unaccounted=False,
            remove_empty_directories=True,
            remove_all_directory_symlinks=False)

        st = os.lstat(link)
        self.assertTrue(stat.S_ISLNK(st.st_mode))
        self.assertFalse(stat.S_ISDIR(st.st_mode))

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(['foo/bar']))

        self.assertEqual(result.removed_directories, set())
        self.assertEqual(len(result.updated_files), 1)

        # If remove_unaccounted but not remove_empty_directories, then
        # only the symlinked directory is removed.
        result = copier.copy(dest, remove_unaccounted=True,
            remove_empty_directories=False,
            remove_all_directory_symlinks=False)

        st = os.lstat(self.tmppath('dest/zot'))
        self.assertFalse(stat.S_ISLNK(st.st_mode))
        self.assertTrue(stat.S_ISDIR(st.st_mode))

        self.assertEqual(result.removed_files, set([link]))
        self.assertEqual(result.removed_directories, set())

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(['foo/bar', 'zot']))

        # If remove_unaccounted and remove_empty_directories, then
        # both the symlink and its containing directory are removed.
        link = self.tmppath('dest/zot/zap')
        os.symlink(dummy, link)

        result = copier.copy(dest, remove_unaccounted=True,
            remove_empty_directories=True,
            remove_all_directory_symlinks=False)

        self.assertEqual(result.removed_files, set([link]))
        self.assertEqual(result.removed_directories, set([self.tmppath('dest/zot')]))

        self.assertEqual(self.all_files(dest), set(copier.paths()))
        self.assertEqual(self.all_dirs(dest), set(['foo/bar']))
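
The assertions above hinge on `os.lstat` inspecting the link itself while `os.stat` follows it; a small standalone illustration (symlink creation on Windows may require extra privileges):

import os
import stat
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "target")
    os.mkdir(target)
    link = os.path.join(tmp, "link")
    os.symlink(target, link)

    st = os.lstat(link)   # inspects the link itself
    assert stat.S_ISLNK(st.st_mode) and not stat.S_ISDIR(st.st_mode)

    st = os.stat(link)    # follows the link to the real directory
    assert stat.S_ISDIR(st.st_mode)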
Exemplo n.º 35
    def _process_android_eclipse_project_data(self, data, srcdir, objdir):
        # This can't be relative to the environment's topsrcdir,
        # because during testing topsrcdir is faked.
        template_directory = os.path.abspath(mozpath.join(os.path.dirname(__file__),
            'templates', 'android_eclipse'))

        project_directory = mozpath.join(self.environment.topobjdir, 'android_eclipse', data.name)
        manifest_path = mozpath.join(self.environment.topobjdir, 'android_eclipse', '%s.manifest' % data.name)

        manifest = self._manifest_for_project(srcdir, data)
        ensureParentDir(manifest_path)
        manifest.write(path=manifest_path)

        classpathentries = []
        for cpe in sorted(data._classpathentries, key=lambda x: x.path):
            e = self._Element_for_classpathentry(cpe)
            classpathentries.append(ET.tostring(e))

        for name in sorted(data.referenced_projects):
            e = self._Element_for_referenced_project(name)
            classpathentries.append(ET.tostring(e))

        for name in sorted(data.extra_jars):
            e = self._Element_for_extra_jar(mozpath.join(srcdir, name))
            classpathentries.append(ET.tostring(e))

        defines = {}
        defines['IDE_OBJDIR'] = objdir
        defines['IDE_TOPOBJDIR'] = self.environment.topobjdir
        defines['IDE_SRCDIR'] = srcdir
        defines['IDE_TOPSRCDIR'] = self.environment.topsrcdir
        defines['IDE_PROJECT_NAME'] = data.name
        defines['IDE_PACKAGE_NAME'] = data.package_name
        defines['IDE_PROJECT_DIRECTORY'] = project_directory
        defines['IDE_RELSRCDIR'] = mozpath.relpath(srcdir, self.environment.topsrcdir)
        defines['IDE_CLASSPATH_ENTRIES'] = '\n'.join('\t' + cpe for cpe in classpathentries)
        defines['IDE_RECURSIVE_MAKE_TARGETS'] = ' '.join(sorted(data.recursive_make_targets))
        # Like android.library=true
        defines['IDE_PROJECT_LIBRARY_SETTING'] = 'android.library=true' if data.is_library else ''
        # Like android.library.reference.1=FennecBrandingResources
        defines['IDE_PROJECT_LIBRARY_REFERENCES'] = '\n'.join(
            'android.library.reference.%s=%s' % (i + 1, ref)
            for i, ref in enumerate(sorted(data.included_projects)))
        if data.filtered_resources:
            filteredResources = self._Element_for_filtered_resources(data.filtered_resources)
            defines['IDE_PROJECT_FILTERED_RESOURCES'] = pretty_print(filteredResources).strip()
        else:
            defines['IDE_PROJECT_FILTERED_RESOURCES'] = ''

        copier = FileCopier()
        finder = FileFinder(template_directory)
        for input_filename, f in itertools.chain(finder.find('**'), finder.find('.**')):
            if input_filename == 'AndroidManifest.xml' and not data.is_library:
                # Main projects supply their own manifests.
                continue
            copier.add(input_filename, PreprocessedFile(
                mozpath.join(finder.base, input_filename),
                depfile_path=None,
                marker='#',
                defines=defines,
                extra_depends={mozpath.join(finder.base, input_filename)}))

        # When we re-create the build backend, we kill everything that was there.
        if os.path.isdir(project_directory):
            self.summary.updated_count += 1
        else:
            self.summary.created_count += 1
        copier.copy(project_directory, skip_if_older=False, remove_unaccounted=True)
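
For reference, the numbered `android.library.reference` lines generated above compose as in this standalone sketch; the project names are made-up examples:

included_projects = ["FennecResourcesGenerated", "FennecBrandingResources"]
references = "\n".join(
    "android.library.reference.%s=%s" % (i + 1, ref)
    for i, ref in enumerate(sorted(included_projects))
)
print(references)
# android.library.reference.1=FennecBrandingResources
# android.library.reference.2=FennecResourcesGenerated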
Exemplo n.º 36
def main():
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--disable-compression', action='store_false',
                        dest='compress', default=True,
                        help='Disable jar compression')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=args.compress, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=args.compress,
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                    'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True,
                            **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       respath)))
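
The shlibsign step above derives a `.chk` companion path from the platform's DLL prefix and suffix; here is a standalone sketch with assumed Linux-style values (the real values come from `buildconfig.substs`, and `os.path.join` stands in for `mozpath.join`):

import os

substs = {"DLL_PREFIX": "lib", "DLL_SUFFIX": ".so"}  # assumed Linux-style values
respath, lib = "", "softokn3"                        # example inputs

libbase = os.path.join(respath, "%s%s" % (substs["DLL_PREFIX"], lib))
libname = "%s%s" % (libbase, substs["DLL_SUFFIX"])
print(libname, libbase + ".chk")  # libsoftokn3.so libsoftokn3.chk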
Exemplo n.º 37
    def _process_android_eclipse_project_data(self, data, srcdir, objdir):
        # This can't be relative to the environment's topsrcdir,
        # because during testing topsrcdir is faked.
        template_directory = os.path.abspath(
            mozpath.join(os.path.dirname(__file__), 'templates',
                         'android_eclipse'))

        project_directory = mozpath.join(self.environment.topobjdir,
                                         'android_eclipse', data.name)
        manifest_path = mozpath.join(self.environment.topobjdir,
                                     'android_eclipse',
                                     '%s.manifest' % data.name)

        manifest = self._manifest_for_project(srcdir, data)
        ensureParentDir(manifest_path)
        manifest.write(path=manifest_path)

        classpathentries = []
        for cpe in sorted(data._classpathentries, key=lambda x: x.path):
            e = self._Element_for_classpathentry(cpe)
            classpathentries.append(ET.tostring(e))

        for name in sorted(data.referenced_projects):
            e = self._Element_for_referenced_project(name)
            classpathentries.append(ET.tostring(e))

        for name in sorted(data.extra_jars):
            e = self._Element_for_extra_jar(mozpath.join(srcdir, name))
            classpathentries.append(ET.tostring(e))

        defines = {}
        defines['IDE_OBJDIR'] = objdir
        defines['IDE_TOPOBJDIR'] = self.environment.topobjdir
        defines['IDE_SRCDIR'] = srcdir
        defines['IDE_TOPSRCDIR'] = self.environment.topsrcdir
        defines['IDE_PROJECT_NAME'] = data.name
        defines['IDE_PACKAGE_NAME'] = data.package_name
        defines['IDE_PROJECT_DIRECTORY'] = project_directory
        defines['IDE_RELSRCDIR'] = mozpath.relpath(srcdir,
                                                   self.environment.topsrcdir)
        defines['IDE_CLASSPATH_ENTRIES'] = '\n'.join(
            '\t' + cpe for cpe in classpathentries)
        defines['IDE_RECURSIVE_MAKE_TARGETS'] = ' '.join(
            sorted(data.recursive_make_targets))
        # Like android.library=true
        defines[
            'IDE_PROJECT_LIBRARY_SETTING'] = 'android.library=true' if data.is_library else ''
        # Like android.library.reference.1=FennecBrandingResources
        defines['IDE_PROJECT_LIBRARY_REFERENCES'] = '\n'.join(
            'android.library.reference.%s=%s' % (i + 1, ref)
            for i, ref in enumerate(sorted(data.included_projects)))
        if data.filtered_resources:
            filteredResources = self._Element_for_filtered_resources(
                data.filtered_resources)
            defines['IDE_PROJECT_FILTERED_RESOURCES'] = pretty_print(
                filteredResources).strip()
        else:
            defines['IDE_PROJECT_FILTERED_RESOURCES'] = ''
        defines['ANDROID_TARGET_SDK'] = self.environment.substs[
            'ANDROID_TARGET_SDK']
        defines['MOZ_ANDROID_MIN_SDK_VERSION'] = self.environment.defines[
            'MOZ_ANDROID_MIN_SDK_VERSION']

        copier = FileCopier()
        finder = FileFinder(template_directory)
        for input_filename, f in itertools.chain(finder.find('**'),
                                                 finder.find('.**')):
            if input_filename == 'AndroidManifest.xml' and not data.is_library:
                # Main projects supply their own manifests.
                continue
            copier.add(
                input_filename,
                PreprocessedFile(
                    mozpath.join(finder.base, input_filename),
                    depfile_path=None,
                    marker='#',
                    defines=defines,
                    extra_depends={mozpath.join(finder.base, input_filename)}))

        # When we re-create the build backend, we kill everything that was there.
        if os.path.isdir(project_directory):
            self._updated_count += 1
        else:
            self._created_count += 1
        copier.copy(project_directory,
                    skip_if_older=False,
                    remove_unaccounted=True)
Exemplo n.º 38
def main():
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--ignore-broken-symlinks', action='store_true', default=False,
                        help='Do not fail when processing broken symlinks.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--compress', choices=('none', 'deflate', 'brotli'),
                        default='deflate',
                        help='Use given jar compression (default: deflate)')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines['ALLDEFINES'])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    compress = {
        'none': False,
        'deflate': True,
        'brotli': JAR_BROTLI,
    }[args.compress]

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=compress,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                    'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True,
                            **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get('MOZ_COPY_PDBS'):
        for p, f in copier:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + '.pdb'
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception('Cannot find jar log: %s' % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [mozpath.join(base, buildconfig.substs['OMNIJAR_NAME'])
                        for base in sink.packager.get_bases(addons=False)]

        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                raise Exception('No jar log data for %s' % p)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       respath)))
Exemplo n.º 39
def main():
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if args.unify:
        def is_native(path):
            path = os.path.abspath(path)
            return platform.machine() in mozpath.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                    'js-compare-ast.js')
            ]
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify),
                                        **finder_args)
        else:
            finder = FileFinder(args.source, **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac:
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache
    if isinstance(formatter, OmniJarFormatter) and launcher.can_launch() \
      and buildconfig.substs['MOZ_DISABLE_STARTUPCACHE'] != '1':
        gre_path = None
        def get_bases():
            for b in sink.packager.get_bases(addons=False):
                for p in (mozpath.join('bin', b), b):
                    if os.path.exists(os.path.join(args.source, p)):
                        yield p
                        break
        for base in sorted(get_bases()):
            if not gre_path:
                gre_path = base
            base_path = sink.normalize_path(base)
            if base_path in formatter.omnijars:
                precompile_cache(formatter.omnijars[base_path],
                                 args.source, gre_path, base)

    copier.copy(args.destination)
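
The `--unify` handling above picks the native-architecture build by sorting the two paths on a boolean key; a standalone sketch of that trick (plain "/" splitting stands in for `mozpath.split`, and the paths are illustrative only):

import platform

def is_native(path):
    # True when one of the path components equals the running machine's name.
    return platform.machine() in path.split("/")

paths = ["/builds/unify/aarch64", "/builds/source/x86_64"]
source, unify = sorted(paths, key=is_native, reverse=True)
print(source, unify)  # the path matching platform.machine() sorts first, if any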
Exemplo n.º 40
def main():
    parser = ArgumentParser()
    parser.add_argument("-D", dest="defines", action="append", metavar="VAR[=VAL]", help="Define a variable")
    parser.add_argument(
        "--format",
        default="omni",
        help="Choose the chrome format for packaging " + "(omni, jar or flat ; default: %(default)s)",
    )
    parser.add_argument("--removals", default=None, help="removed-files source file")
    parser.add_argument("--ignore-errors", action="store_true", default=False, help="Transform errors into warnings.")
    parser.add_argument(
        "--minify", action="store_true", default=False, help="Make some files more compact while packaging"
    )
    parser.add_argument("--minify-js", action="store_true", help="Minify JavaScript files while packaging.")
    parser.add_argument(
        "--js-binary",
        help="Path to js binary. This is used to verify "
        "minified JavaScript. If this is not defined, "
        "minification verification will not be performed.",
    )
    parser.add_argument("--jarlog", default="", help="File containing jar " + "access logs")
    parser.add_argument("--optimizejars", action="store_true", default=False, help="Enable jar optimizations")
    parser.add_argument("--unify", default="", help="Base directory of another build to unify with")
    parser.add_argument(
        "--disable-compression", action="store_false", dest="compress", default=True, help="Disable jar compression"
    )
    parser.add_argument("manifest", default=None, nargs="?", help="Manifest file name")
    parser.add_argument("source", help="Source directory")
    parser.add_argument("destination", help="Destination directory")
    parser.add_argument(
        "--non-resource", nargs="+", metavar="PATTERN", default=[], help="Extra files not to be considered as resources"
    )
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == "flat":
        formatter = FlatFormatter(copier)
    elif args.format == "jar":
        formatter = JarFormatter(copier, compress=args.compress, optimize=args.optimizejars)
    elif args.format == "omni":
        formatter = OmniJarFormatter(
            copier,
            buildconfig.substs["OMNIJAR_NAME"],
            compress=args.compress,
            optimize=args.optimizejars,
            non_resources=args.non_resource,
        )
    else:
        errors.fatal("Unknown format: %s" % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines["MOZ_OMNIJAR"] = 1
    elif "MOZ_OMNIJAR" in defines:
        del defines["MOZ_OMNIJAR"]

    respath = ""
    if "RESPATH" in defines:
        respath = SimpleManifestSink.normalize_path(defines["RESPATH"])
    while respath.startswith("/"):
        respath = respath[1:]

    if args.unify:

        def is_native(path):
            path = os.path.abspath(path)
            return platform.machine() in mozpath.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify], key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs["CROSS_COMPILE"]:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, "dist")

    with errors.accumulate():
        finder_args = dict(minify=args.minify, minify_js=args.minify_js)
        if args.js_binary:
            finder_args["minify_js_verify_command"] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)), "js-compare-ast.js"),
            ]
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source), FileFinder(args.unify), **finder_args)
        else:
            finder = FileFinder(args.source, **finder_args)
        if "NO_PKG_FILES" in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter, args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(""), "bin/*")
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, "removed-files"), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get("COMPILE_ENVIRONMENT"):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, "%s%s") % (buildconfig.substs["DLL_PREFIX"], lib)
                libname = "%s%s" % (libbase, buildconfig.substs["DLL_SUFFIX"])
                if copier.contains(libname):
                    copier.add(libbase + ".chk", LibSignFile(os.path.join(args.destination, libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog

        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache
    if (
        isinstance(formatter, OmniJarFormatter)
        and launcher.can_launch()
        and buildconfig.substs["MOZ_DISABLE_STARTUPCACHE"] != "1"
    ):
        gre_path = None

        def get_bases():
            for b in sink.packager.get_bases(addons=False):
                for p in (mozpath.join("bin", b), b):
                    if os.path.exists(os.path.join(args.source, p)):
                        yield p
                        break

        for base in sorted(get_bases()):
            if not gre_path:
                gre_path = base
            omnijar_path = mozpath.join(sink.normalize_path(base), buildconfig.substs["OMNIJAR_NAME"])
            if formatter.contains(omnijar_path):
                precompile_cache(formatter.copier[omnijar_path], args.source, gre_path, base)

    copier.copy(args.destination)
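
Finally, the startup-cache pass above prefers the `bin/<base>` layout when it exists on disk; a minimal sketch of that base selection, with illustrative paths only:

import os

def pick_base(source, base):
    """Prefer "bin/<base>" under `source` when present, else "<base>" itself."""
    for p in (os.path.join("bin", base), base):
        if os.path.exists(os.path.join(source, p)):
            return p
    return None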