Example #1
def main():
    parser = argparse.ArgumentParser(
        description=
        "Merge two builds of a Gecko-based application into a Universal build")
    parser.add_argument("app1", help="Directory containing the application")
    parser.add_argument("app2",
                        help="Directory containing the application to merge")
    parser.add_argument(
        "--non-resource",
        nargs="+",
        metavar="PATTERN",
        default=[],
        help="Extra files not to be considered as resources",
    )

    options = parser.parse_args()

    buildconfig.substs["OS_ARCH"] = "Darwin"
    buildconfig.substs["LIPO"] = os.environ.get("LIPO")

    app1_finder = UnpackFinder(FileFinder(options.app1, find_executables=True))
    app2_finder = UnpackFinder(FileFinder(options.app2, find_executables=True))
    app_finder = UnifiedBuildFinderWasmHack(app1_finder, app2_finder)

    copier = FileCopier()
    compress = min(app1_finder.compressed, JAR_DEFLATED)
    if app1_finder.kind == "flat":
        formatter = FlatFormatter(copier)
    elif app1_finder.kind == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif app1_finder.kind == "omni":
        formatter = OmniJarFormatter(
            copier,
            app1_finder.omnijar,
            compress=compress,
            non_resources=options.non_resource,
        )

    with errors.accumulate():
        packager = SimplePackager(formatter)
        for p, f in app_finder:
            packager.add(p, f)
        packager.close()

        # Transplant jar preloading information.
        for path, log in six.iteritems(app1_finder.jarlogs):
            assert isinstance(copier[path], Jarrer)
            copier[path].preload(log)

    copier.copy(options.app1, skip_if_older=False)
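A hedged sketch of driving the entry point above (the script name and build paths are assumptions; `main()` reads the two application directories from `sys.argv` and the `lipo` binary from the `LIPO` environment variable):

import os
import sys

# Hypothetical invocation: merge an x86_64 build and an aarch64 build of Firefox.app.
os.environ.setdefault("LIPO", "/usr/bin/lipo")  # consumed via buildconfig.substs above
sys.argv = [
    "unify.py",                      # assumed script name
    "obj-x86_64/dist/Firefox.app",   # app1: the merge target that copier.copy() writes into
    "obj-aarch64/dist/Firefox.app",  # app2: merged into app1
]
main()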
Example #2
File: l10n.py  Project: nalmt/gecko-dev
def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()):
    '''
    Replace localized data from the `source` directory with localized data
    from `l10n` and `extra_l10n`.

    The `source` argument points to a directory containing a packaged
    application (in omnijar, jar or flat form).
    The `l10n` argument points to a directory containing the main localized
    data (usually in the form of a language pack addon) to use to replace
    in the packaged application.
    The `extra_l10n` argument contains a dict associating relative paths in
    the source to separate directories containing localized data for them.
    This can be used to point at different language pack addons for different
    parts of the packaged application.
    The `non_resources` argument gives a list of relative paths in the source
    that should not be added in an omnijar in case the packaged application
    is in that format.
    The `non_chrome` argument gives a list of file/directory patterns for
    localized files that are not listed in a chrome.manifest.
    '''
    app_finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    if extra_l10n:
        finders = {
            '': l10n_finder,
        }
        for base, path in extra_l10n.iteritems():
            finders[base] = UnpackFinder(path)
        l10n_finder = ComposedFinder(finders)
    copier = FileCopier()
    compress = min(app_finder.compressed, JAR_DEFLATED)
    if app_finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif app_finder.kind == 'jar':
        formatter = JarFormatter(copier,
                                 optimize=app_finder.optimizedjars,
                                 compress=compress)
    elif app_finder.kind == 'omni':
        formatter = OmniJarFormatter(copier,
                                     app_finder.omnijar,
                                     optimize=app_finder.optimizedjars,
                                     compress=compress,
                                     non_resources=non_resources)

    with errors.accumulate():
        _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
    copier.copy(source, skip_if_older=False)
    generate_precomplete(source)
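A hedged usage sketch for the `repack` API documented in the docstring above (the import path matches the file and project noted in the header; the directory layout is an assumption):

from mozpack.packager.l10n import repack  # assumed module path for the l10n.py above

# Replace en-US data in a packaged build with data from a French language pack.
repack(
    "obj-firefox/dist/firefox",                 # packaged application (omni, jar or flat)
    "obj-l10n/dist/xpi-stage/locale-fr",        # main localized data (language pack layout)
    extra_l10n={},                              # e.g. {"browser": "/path/to/other-langpack"}
    non_resources=[],                           # relative paths to keep out of the omnijar
    non_chrome=set(["**/crashreporter*.ini"]),  # localized files not listed in a chrome.manifest
)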
Example #3
    def process_package_artifact(self, filename, processed_filename):
        added_entry = False

        with JarWriter(file=processed_filename,
                       optimize=False,
                       compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for p, f in UnpackFinder(TarFinder(filename, reader)):
                    if not any(
                            mozpath.match(p, pat)
                            for pat in self.package_artifact_patterns):
                        continue

                    # We strip off the relative "firefox/" bit from the path,
                    # but otherwise preserve it.
                    destpath = mozpath.join('bin',
                                            mozpath.relpath(p, "firefox"))
                    self.log(logging.INFO, 'artifact', {'destpath': destpath},
                             'Adding {destpath} to processed archive')
                    writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
                    added_entry = True

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                'matched an archive path.'.format(
                    patterns=LinuxArtifactJob.package_artifact_patterns))
Example #4
File: find-dupes.py  Project: yws/Waterfox
def find_dupes(source, allowed_dupes, bail=True):
    allowed_dupes = set(allowed_dupes)
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        content = f.open().read()
        m = hashlib.md5(content).digest()
        if m not in md5s:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = len(content)
            md5s[m] = (len(content), compressed, [])
        md5s[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    for m, (size, compressed, paths) in sorted(md5s.iteritems(),
                                               key=lambda x: x[1][1]):
        if len(paths) > 1:
            print 'Duplicates %d bytes%s%s:' % (
                size, ' (%d compressed)' %
                compressed if compressed != size else '', ' (%d times)' %
                (len(paths) - 1) if len(paths) > 2 else '')
            print ''.join('  %s\n' % p for p in paths)
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

    if num_dupes:
        print "WARNING: Found %d duplicated files taking %d bytes (%s)" % \
              (num_dupes, total,
               '%d compressed' % total_compressed if total_compressed != total
                                                  else 'uncompressed')
Example #5
def repack(source, l10n, non_resources=[], non_chrome=set()):
    app_finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    copier = FileCopier()
    if app_finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif app_finder.kind == 'jar':
        formatter = JarFormatter(copier, optimize=app_finder.optimizedjars)
    elif app_finder.kind == 'omni':
        formatter = OmniJarFormatter(copier, app_finder.omnijar,
                                     optimize=app_finder.optimizedjars,
                                     non_resources=non_resources)

    with errors.accumulate():
        _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
    copier.copy(source, skip_if_older=False)
    generate_precomplete(source)
Example #6
def find_dupes(source, allowed_dupes, bail=True):
    chunk_size = 1024 * 10
    allowed_dupes = set(allowed_dupes)
    checksums = OrderedDict()
    for p, f in UnpackFinder(source):
        checksum = hashlib.sha1()
        content_size = 0
        for buf in iter(functools.partial(f.open().read, chunk_size), b""):
            checksum.update(six.ensure_binary(buf))
            content_size += len(six.ensure_binary(buf))
        m = checksum.digest()
        if m not in checksums:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = content_size
            checksums[m] = (content_size, compressed, [])
        checksums[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    unexpected_dupes = []
    for m, (size, compressed, paths) in sorted(
        six.iteritems(checksums), key=lambda x: x[1][1]
    ):
        if len(paths) > 1:
            _compressed = " (%d compressed)" % compressed if compressed != size else ""
            _times = " (%d times)" % (len(paths) - 1) if len(paths) > 2 else ""
            print("Duplicates {} bytes{}{}:".format(size, _compressed, _times))
            print("".join("  %s\n" % p for p in paths))
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

            for p in paths:
                if not is_l10n_file(p) and normalize_path(p) not in allowed_dupes:
                    unexpected_dupes.append(p)

    if num_dupes:
        total_compressed = (
            "%d compressed" % total_compressed
            if total_compressed != total
            else "uncompressed"
        )
        print(
            "WARNING: Found {} duplicated files taking {} bytes ({})".format(
                num_dupes, total, total_compressed
            )
        )

    if unexpected_dupes:
        errortype = "ERROR" if bail else "WARNING"
        print("{}: The following duplicated files are not allowed:".format(errortype))
        print("\n".join(unexpected_dupes))
        if bail:
            sys.exit(1)
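For context, a minimal sketch of driving this duplicate check (the packaged-build path and the allow-list entry are hypothetical):

# Scan a packaged build for byte-identical files and only warn instead of exiting.
allowed = [
    "chrome/toolkit/skin/classic/global/icons/info.svg",  # hypothetical allow-listed duplicate
]
find_dupes("obj-firefox/dist/firefox", allowed, bail=False)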
Example #7
def find_dupes(source, allowed_dupes, bail=True):
    md5_chunk_size = 1024 * 10
    allowed_dupes = set(allowed_dupes)
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        md5 = hashlib.md5()
        content_size = 0
        for buf in iter(functools.partial(f.open('rb').read, md5_chunk_size),
                        b''):
            md5.update(six.ensure_binary(buf))
            content_size += len(six.ensure_binary(buf))
        m = md5.digest()
        if m not in md5s:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = content_size
            md5s[m] = (content_size, compressed, [])
        md5s[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    unexpected_dupes = []
    for m, (size, compressed, paths) in sorted(six.iteritems(md5s),
                                               key=lambda x: x[1][1]):
        if len(paths) > 1:
            _compressed = ' (%d compressed)' % compressed if compressed != size else ''
            _times = ' (%d times)' % (len(paths) - 1) if len(paths) > 2 else ''
            print('Duplicates {} bytes{}{}:'.format(size, _compressed, _times))
            print(''.join('  %s\n' % p for p in paths))
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

            for p in paths:
                if not is_l10n_file(p) and normalize_path(
                        p) not in allowed_dupes:
                    unexpected_dupes.append(p)

    if num_dupes:
        total_compressed = '%d compressed' % total_compressed \
            if total_compressed != total else 'uncompressed'
        print("WARNING: Found {} duplicated files taking {} bytes ({})".format(
            num_dupes, total, total_compressed))

    if unexpected_dupes:
        errortype = "ERROR" if bail else "WARNING"
        print("{}: The following duplicated files are not allowed:".format(
            errortype))
        print("\n".join(unexpected_dupes))
        if bail:
            sys.exit(1)
Example #8
def find_dupes(source, allowed_dupes, bail=True):
    md5_chunk_size = 1024 * 10
    allowed_dupes = set(allowed_dupes)
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        md5 = hashlib.md5()
        content_size = 0
        for buf in iter(functools.partial(f.open().read, md5_chunk_size), b''):
            md5.update(buf)
            content_size += len(buf)
        m = md5.digest()
        if m not in md5s:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = content_size
            md5s[m] = (content_size, compressed, [])
        md5s[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    unexpected_dupes = []
    for m, (size, compressed, paths) in sorted(md5s.iteritems(),
                                               key=lambda x: x[1][1]):
        if len(paths) > 1:
            print 'Duplicates %d bytes%s%s:' % (
                size, ' (%d compressed)' %
                compressed if compressed != size else '', ' (%d times)' %
                (len(paths) - 1) if len(paths) > 2 else '')
            print ''.join('  %s\n' % p for p in paths)
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

            for p in paths:
                if not is_l10n_file(p) and normalize_path(
                        p) not in allowed_dupes:
                    unexpected_dupes.append(p)

    if num_dupes:
        print "WARNING: Found %d duplicated files taking %d bytes (%s)" % \
              (num_dupes, total,
               '%d compressed' % total_compressed if total_compressed != total
                                                  else 'uncompressed')

    if unexpected_dupes:
        errortype = "ERROR" if bail else "WARNING"
        print "%s: The following duplicated files are not allowed:" % errortype
        print "\n".join(unexpected_dupes)
        if bail:
            sys.exit(1)
Example #9
    def process_package_artifact(self, filename, processed_filename):
        # Extract all .so files into the root, which will get copied into dist/bin.
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                    continue

                dirname, basename = os.path.split(p)
                self.log(logging.INFO, 'artifact',
                    {'basename': basename},
                    'Adding {basename} to processed archive')

                basedir = 'bin'
                if not basename.endswith('.so'):
                    basedir = mozpath.join('bin', dirname.lstrip('assets/'))
                basename = mozpath.join(basedir, basename)
                writer.add(basename.encode('utf-8'), f.open())
Example #10
    def process_package_artifact(self, filename, processed_filename):
        added_entry = False
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                    continue

                # strip off the relative "firefox/" bit from the path:
                basename = mozpath.relpath(p, "firefox")
                basename = mozpath.join('bin', basename)
                self.log(logging.INFO, 'artifact',
                    {'basename': basename},
                    'Adding {basename} to processed archive')
                writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
                added_entry = True

        if not added_entry:
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=self.artifact_patterns))
Example #11
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        try:
            self.log(logging.INFO, 'artifact', {'tempdir': tempdir},
                     'Unpacking DMG into {tempdir}')
            mozinstall.install(
                filename,
                tempdir)  # Doesn't handle already mounted DMG files nicely:

            # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"

            #   File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
            #     return artifacts.install_from(source, self.distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
            #     return self.install_from_hg(source, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
            #     return self.install_from_url(url, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
            #     return self.install_from_file(filename, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
            #     mozinstall.install(filename, tempdir)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
            #     install_dir = _install_dmg(src, dest)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
            #     subprocess.call('hdiutil detach %s -quiet' % appDir,

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError(
                    'Expected one source bundle, found: {}'.format(
                        bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = (
                'Contents/MacOS',
                [
                    'crashreporter.app/Contents/MacOS/crashreporter',
                    'firefox',
                    'firefox-bin',
                    'libfreebl3.dylib',
                    'liblgpllibs.dylib',
                    # 'liblogalloc.dylib',
                    'libmozglue.dylib',
                    'libnss3.dylib',
                    'libnssckbi.dylib',
                    'libnssdbm3.dylib',
                    'libplugin_child_interpose.dylib',
                    # 'libreplace_jemalloc.dylib',
                    # 'libreplace_malloc.dylib',
                    'libmozavutil.dylib',
                    'libmozavcodec.dylib',
                    'libsoftokn3.dylib',
                    'plugin-container.app/Contents/MacOS/plugin-container',
                    'updater.app/Contents/MacOS/org.mozilla.updater',
                    # 'xpcshell',
                    'XUL',
                ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = (
                'Contents/Resources',
                [
                    'browser/components/libbrowsercomps.dylib',
                    'dependentlibs.list',
                    # 'firefox',
                    'gmp-clearkey/0.1/libclearkey.dylib',
                    # 'gmp-fake/1.0/libfake.dylib',
                    # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    '**/interfaces.xpt',
                ])

            with JarWriter(file=processed_filename,
                           optimize=False,
                           compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact', {'path': p},
                                 'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                root, paths = paths_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact', {'path': p},
                                 'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', p)
                        writer.add(destpath.encode('utf-8'),
                                   f.open(),
                                   mode=f.mode)

        finally:
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact', {'tempdir': tempdir},
                         'Unable to delete {tempdir}')
                pass
Example #12
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(logging.INFO, 'artifact',
                {'tempdir': tempdir},
                'Unpacking DMG into {tempdir}')
            if self._substs['HOST_OS_ARCH'] == 'Linux':
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, 'wb') as devnull:
                    subprocess.check_call([
                        self._substs['DMG_TOOL'],
                        'extract',
                        filename,
                        'extracted_img',
                    ], stdout=devnull)
                    subprocess.check_call([
                        self._substs['HFS_TOOL'],
                        'extracted_img',
                        'extractall'
                    ], stdout=devnull)
            else:
                mozinstall.install(filename, tempdir)

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = ('Contents/MacOS', [
                'crashreporter.app/Contents/MacOS/crashreporter',
                'crashreporter.app/Contents/MacOS/minidump-analyzer',
                'firefox',
                'firefox-bin',
                'libfreebl3.dylib',
                'liblgpllibs.dylib',
                # 'liblogalloc.dylib',
                'libmozglue.dylib',
                'libnss3.dylib',
                'libnssckbi.dylib',
                'libnssdbm3.dylib',
                'libplugin_child_interpose.dylib',
                # 'libreplace_jemalloc.dylib',
                # 'libreplace_malloc.dylib',
                'libmozavutil.dylib',
                'libmozavcodec.dylib',
                'libsoftokn3.dylib',
                'pingsender',
                'plugin-container.app/Contents/MacOS/plugin-container',
                'updater.app/Contents/MacOS/org.mozilla.updater',
                # 'xpcshell',
                'XUL',
            ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = ('Contents/Resources', [
                'browser/components/libbrowsercomps.dylib',
                'dependentlibs.list',
                # 'firefox',
                'gmp-clearkey/0.1/libclearkey.dylib',
                # 'gmp-fake/1.0/libfake.dylib',
                # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                '**/interfaces.xpt',
            ])

            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                root, paths = paths_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', p)
                        writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)

        finally:
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact',
                    {'tempdir': tempdir},
                    'Unable to delete {tempdir}')
                pass
Example #13
def fat_aar(distdir,
            aars_paths,
            no_process=False,
            no_compatibility_check=False):
    if no_process:
        print('Not processing architecture-specific artifact Maven AARs.')
        return 0

    # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
    diffs = defaultdict(lambda: defaultdict(list))
    missing_arch_prefs = set()
    # Collect multi-architecture inputs to the fat AAR.
    copier = FileCopier()

    for arch, aar_path in aars_paths.items():
        # Map old non-architecture-specific path to new architecture-specific path.
        old_rewrite_map = {
            'greprefs.js':
            '{}/greprefs.js'.format(arch),
            'defaults/pref/geckoview-prefs.js':
            'defaults/pref/{}/geckoview-prefs.js'.format(arch),
        }

        # Architecture-specific preferences files.
        arch_prefs = set(old_rewrite_map.values())
        missing_arch_prefs |= set(arch_prefs)

        jar_finder = JarFinder(aar_path, JarReader(aar_path))
        for path, fileobj in UnpackFinder(jar_finder):
            # Native libraries go straight through.
            if mozpath.match(path, 'jni/**'):
                copier.add(path, fileobj)

            elif path in arch_prefs:
                copier.add(path, fileobj)

            elif path in ('classes.jar', 'annotations.zip'):
                # annotations.zip differs due to timestamps, but the contents should not.

                # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
                # and it's not worth working around, so we use Python's zip functionality
                # instead.
                z = ZipFile(BytesIO(fileobj.open().read()))
                for r in z.namelist():
                    fingerprint = sha1(z.open(r).read()).hexdigest()
                    diffs['{}!/{}'.format(path, r)][fingerprint].append(arch)

            else:
                fingerprint = sha1(fileobj.open().read()).hexdigest()
                # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
                # since in practice they will never overlap.
                diffs[path][fingerprint].append(arch)

            missing_arch_prefs.discard(path)

    # Some differences are allowed across the architecture-specific AARs.  We could allow-list
    # the actual content, but it's not necessary right now.
    allow_pattern_list = {
        'AndroidManifest.xml',  # Min SDK version is different for 32- and 64-bit builds.
        'classes.jar!/org/mozilla/gecko/util/HardwareUtils.class',  # Min SDK as well.
        'classes.jar!/org/mozilla/geckoview/BuildConfig.class',
        # Each input captures its CPU architecture.
        'chrome/toolkit/content/global/buildconfig.html',
        # Bug 1556162: localized resources are not deterministic across
        # per-architecture builds triggered from the same push.
        '**/*.ftl',
        '**/*.dtd',
        '**/*.properties',
    }

    not_allowed = OrderedDict()

    def format_diffs(ds):
        # Like '  armeabi-v7a, arm64-v8a -> XXX\n  x86, x86_64 -> YYY'.
        return '\n'.join(
            sorted('  {archs} -> {fingerprint}'.format(
                archs=', '.join(sorted(archs)), fingerprint=fingerprint)
                   for fingerprint, archs in ds.iteritems()))

    for p, ds in sorted(diffs.iteritems()):
        if len(ds) <= 1:
            # Only one hash across all inputs: roll on.
            continue

        if any(mozpath.match(p, pat) for pat in allow_pattern_list):
            print(
                'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
                .format(path=p, ds_repr=format_diffs(ds)))
            continue

        not_allowed[p] = ds

    for p, ds in not_allowed.iteritems():
        print(
            'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
            .format(path=p, ds_repr=format_diffs(ds)))

    for missing in sorted(missing_arch_prefs):
        print(
            'Disallowed: Inputs missing expected architecture-specific input: {missing}'
            .format(missing=missing))

    if not no_compatibility_check and (missing_arch_prefs or not_allowed):
        return 1

    output_dir = mozpath.join(distdir, 'output')
    copier.copy(output_dir)

    return 0
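A hedged usage sketch (the AAR paths are assumptions; the architecture keys match the ones referenced in `format_diffs` above):

exit_code = fat_aar(
    "obj-android/dist",
    {
        "armeabi-v7a": "path/to/geckoview-armeabi-v7a.aar",
        "arm64-v8a": "path/to/geckoview-arm64-v8a.aar",
        "x86": "path/to/geckoview-x86.aar",
        "x86_64": "path/to/geckoview-x86_64.aar",
    },
)
# A non-zero return means architecture-specific differences were found outside
# allow_pattern_list, or an expected per-architecture prefs file was missing.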
Example #14
File: msix.py  Project: Floflis/gecko-b2g
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=[],
    locale_allowlist=set(),
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly?  Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )
    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)
    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds.
    # The nested langpack XPI files can't be read by `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to give us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that we
    # can't easily evaluate completely here.  Instead, we take the last value
    # from `configure.sh`.
    lines = [
        line
        for line in open(mozpath.join(branding, "configure.sh")).readlines()
        if "MOZ_IGECKOBACKCHANNEL_IID" in line
    ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding.  Differentiate Beta a little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
    # Package Root'.  This is `BrandFullName` in the installer, and we want to
    # be close but to not match.  By not matching, we hope to prevent confusion
    # and/or errors between regularly installed builds and App Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here.  I.e.,
    # "__{publisherID}".  My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
    # missing field, but it's not necessary, since this is just an output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs. This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)

            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts fetched from dependent
        # tasks.  In particular, langpacks will be named like `target.langpack.xpi`.  The fetch
        # tasks do allow us to put them in a per-locale directory, so that the entire set can be
        # fetched.  Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE".  This is how langpacks
                # are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/[email protected]".  This is what AMO
                # serves and how flatpak builds name langpacks, but not how snap builds name
                # langpacks.  I can't explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/[email protected]" into "LOCALE".  This is
                # how langpacks are presented from an unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales: see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, which is
    # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales.  But we
    # only advertise support in the App manifest for the intersection of that set and the set of
    # supported locales.
    #
    # We distribute all langpacks to avoid the following issue.  Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package.  MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update.  But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f'    <Resource Language="{locale}" />' for locale in sorted(locales)
    )

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
        # Beta Package Root'.  See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
            log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
    # supports only hyphen style.  `makeappx.exe` allows overwriting and gives
    # more feedback, so we prefer invoking it with those flags.  This will
    # also accommodate `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
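Finally, a hedged sketch of calling this repackaging entry point directly (the input path, channel, branding/template directories, and publisher string are placeholders; `log` is passed because the function logs unconditionally):

def log(level, action, params, format_str):
    # Minimal structured-logging shim; real callers pass a mozbuild logger.
    print(format_str.format(**params))

output_msix = repackage_msix(
    "firefox-94.0.en-US.win64.zip",             # packaged build, unpacked dir, or .msix
    channel="nightly",
    branding="browser/branding/nightly",        # assumed in-tree branding directory
    template="browser/installer/windows/msix",  # assumed AppxManifest template directory
    arch="x86_64",
    publisher="CN=Mozilla Corporation",         # must match the signing certificate subject
    log=log,
)
print("Wrote", output_msix)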