Example #1
def find_dupes(source, allowed_dupes, bail=True):
    md5_chunk_size = 1024 * 10
    allowed_dupes = set(allowed_dupes)
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        md5 = hashlib.md5()
        content_size = 0
        for buf in iter(functools.partial(f.open().read, md5_chunk_size), b""):
            md5.update(six.ensure_binary(buf))
            content_size += len(six.ensure_binary(buf))
        m = md5.digest()
        if m not in md5s:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = content_size
            md5s[m] = (content_size, compressed, [])
        md5s[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    unexpected_dupes = []
    for m, (size, compressed, paths) in sorted(six.iteritems(md5s),
                                               key=lambda x: x[1][1]):
        if len(paths) > 1:
            _compressed = " (%d compressed)" % compressed if compressed != size else ""
            _times = " (%d times)" % (len(paths) - 1) if len(paths) > 2 else ""
            print("Duplicates {} bytes{}{}:".format(size, _compressed, _times))
            print("".join("  %s\n" % p for p in paths))
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

            for p in paths:
                if not is_l10n_file(p) and normalize_path(
                        p) not in allowed_dupes:
                    unexpected_dupes.append(p)

    if num_dupes:
        total_compressed = ("%d compressed" % total_compressed
                            if total_compressed != total else "uncompressed")
        print("WARNING: Found {} duplicated files taking {} bytes ({})".format(
            num_dupes, total, total_compressed))

    if unexpected_dupes:
        errortype = "ERROR" if bail else "WARNING"
        print("{}: The following duplicated files are not allowed:".format(
            errortype))
        print("\n".join(unexpected_dupes))
        if bail:
            sys.exit(1)
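
Example #1's inner loop relies on the two-argument iter(callable, sentinel) idiom to stream each packaged file through MD5 in 10 KiB chunks instead of reading it whole (compare Example #3, which reads the full content). A minimal sketch of the same idiom against an ordinary local file, outside of UnpackFinder, with a hypothetical md5_of_file helper that is not part of the example code:

import functools
import hashlib

def md5_of_file(path, chunk_size=1024 * 10):
    """Hash a file in fixed-size chunks without loading it all into memory."""
    digest = hashlib.md5()
    with open(path, "rb") as fh:
        # iter(callable, sentinel) keeps calling fh.read(chunk_size) until it
        # returns b"" (end of file), the same pattern the example applies to
        # the finder's file objects.
        for chunk in iter(functools.partial(fh.read, chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()
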
Example #2
    def process_package_artifact(self, filename, processed_filename):
        # Extract all .so files into the root, which will get copied into dist/bin.
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                    continue

                dirname, basename = os.path.split(p)
                self.log(logging.INFO, 'artifact',
                    {'basename': basename},
                    'Adding {basename} to processed archive')

                basedir = 'bin'
                if not basename.endswith('.so'):
                    basedir = mozpath.join('bin', dirname.lstrip('assets/'))
                basename = mozpath.join(basedir, basename)
                writer.add(basename.encode('utf-8'), f.open())
Example #3
def find_dupes(source, allowed_dupes, bail=True):
    allowed_dupes = set(allowed_dupes)
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        content = f.open().read()
        m = hashlib.md5(content).digest()
        if m not in md5s:
            if isinstance(f, DeflatedFile):
                compressed = f.file.compressed_size
            else:
                compressed = len(content)
            md5s[m] = (len(content), compressed, [])
        md5s[m][2].append(p)
    total = 0
    total_compressed = 0
    num_dupes = 0
    unexpected_dupes = []
    for m, (size, compressed, paths) in sorted(md5s.iteritems(),
                                               key=lambda x: x[1][1]):
        if len(paths) > 1:
            print 'Duplicates %d bytes%s%s:' % (
                size, ' (%d compressed)' %
                compressed if compressed != size else '', ' (%d times)' %
                (len(paths) - 1) if len(paths) > 2 else '')
            print ''.join('  %s\n' % p for p in paths)
            total += (len(paths) - 1) * size
            total_compressed += (len(paths) - 1) * compressed
            num_dupes += 1

            for p in paths:
                if not is_l10n_file(p) and normalize_path(
                        p) not in allowed_dupes:
                    unexpected_dupes.append(p)

    if num_dupes:
        print "WARNING: Found %d duplicated files taking %d bytes (%s)" % \
              (num_dupes, total,
               '%d compressed' % total_compressed if total_compressed != total
                                                  else 'uncompressed')

    if unexpected_dupes:
        errortype = "ERROR" if bail else "WARNING"
        print "%s: The following duplicated files are not allowed:" % errortype
        print "\n".join(unexpected_dupes)
        if bail:
            sys.exit(1)
Example #4
    def process_package_artifact(self, filename, processed_filename):
        added_entry = False
        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                    continue

                # strip off the relative "firefox/" bit from the path:
                basename = mozpath.relpath(p, "firefox")
                basename = mozpath.join('bin', basename)
                self.log(logging.INFO, 'artifact',
                    {'basename': basename},
                    'Adding {basename} to processed archive')
                writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
                added_entry = True

        if not added_entry:
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=self.artifact_patterns))
Example #5
    def process_package_artifact(self, filename, processed_filename):
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for p, f in UnpackFinder(TarFinder(filename, reader)):
                    if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
                        continue

                    # We strip off the relative "firefox/" bit from the path,
                    # but otherwise preserve it.
                    destpath = mozpath.join('bin',
                                            mozpath.relpath(p, "firefox"))
                    self.log(logging.INFO, 'artifact',
                             {'destpath': destpath},
                             'Adding {destpath} to processed archive')
                    writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
                    added_entry = True

        if not added_entry:
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=LinuxArtifactJob.package_artifact_patterns))
Example #6
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        try:
            self.log(logging.INFO, 'artifact', {'tempdir': tempdir},
                     'Unpacking DMG into {tempdir}')
            mozinstall.install(
                filename,
                tempdir)  # Doesn't handle already mounted DMG files nicely:

            # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"

            #   File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
            #     return artifacts.install_from(source, self.distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
            #     return self.install_from_hg(source, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
            #     return self.install_from_url(url, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
            #     return self.install_from_file(filename, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
            #     mozinstall.install(filename, tempdir)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
            #     install_dir = _install_dmg(src, dest)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
            #     subprocess.call('hdiutil detach %s -quiet' % appDir,

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError(
                    'Expected one source bundle, found: {}'.format(
                        bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = (
                'Contents/MacOS',
                [
                    'crashreporter.app/Contents/MacOS/crashreporter',
                    'firefox',
                    'firefox-bin',
                    'libfreebl3.dylib',
                    'liblgpllibs.dylib',
                    # 'liblogalloc.dylib',
                    'libmozglue.dylib',
                    'libnss3.dylib',
                    'libnssckbi.dylib',
                    'libnssdbm3.dylib',
                    'libplugin_child_interpose.dylib',
                    # 'libreplace_jemalloc.dylib',
                    # 'libreplace_malloc.dylib',
                    'libmozavutil.dylib',
                    'libmozavcodec.dylib',
                    'libsoftokn3.dylib',
                    'plugin-container.app/Contents/MacOS/plugin-container',
                    'updater.app/Contents/MacOS/org.mozilla.updater',
                    # 'xpcshell',
                    'XUL',
                ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = (
                'Contents/Resources',
                [
                    'browser/components/libbrowsercomps.dylib',
                    'dependentlibs.list',
                    # 'firefox',
                    'gmp-clearkey/0.1/libclearkey.dylib',
                    # 'gmp-fake/1.0/libfake.dylib',
                    # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    '**/interfaces.xpt',
                ])

            with JarWriter(file=processed_filename,
                           optimize=False,
                           compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact', {'path': p},
                                 'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                root, paths = paths_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact', {'path': p},
                                 'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', p)
                        writer.add(destpath.encode('utf-8'),
                                   f.open(),
                                   mode=f.mode)

        finally:
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact', {'tempdir': tempdir},
                         'Unable to delete {tempdir}')
                pass
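
Examples #6 through #8 and #10 all wrap the DMG extraction in the same scratch-directory pattern: create a temporary directory, do the work, and remove it in a finally block so cleanup runs even when extraction or repackaging raises. A stripped-down sketch of that pattern, using a hypothetical with_scratch_dir helper that is not part of the source:

import shutil
import tempfile

def with_scratch_dir(work):
    """Run work(tempdir) in a throwaway directory, cleaning up afterwards."""
    tempdir = tempfile.mkdtemp()
    try:
        return work(tempdir)
    finally:
        # Mirror the examples above: best-effort cleanup that never masks an
        # exception raised by the work itself.
        try:
            shutil.rmtree(tempdir)
        except OSError:
            pass
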
Example #7
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(logging.INFO, 'artifact',
                {'tempdir': tempdir},
                'Unpacking DMG into {tempdir}')
            if self._substs['HOST_OS_ARCH'] == 'Linux':
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, 'wb') as devnull:
                    subprocess.check_call([
                        self._substs['DMG_TOOL'],
                        'extract',
                        filename,
                        'extracted_img',
                    ], stdout=devnull)
                    subprocess.check_call([
                        self._substs['HFS_TOOL'],
                        'extracted_img',
                        'extractall'
                    ], stdout=devnull)
            else:
                mozinstall.install(filename, tempdir)

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = ('Contents/MacOS', [
                'crashreporter.app/Contents/MacOS/crashreporter',
                'crashreporter.app/Contents/MacOS/minidump-analyzer',
                'firefox',
                'firefox-bin',
                'libfreebl3.dylib',
                'liblgpllibs.dylib',
                # 'liblogalloc.dylib',
                'libmozglue.dylib',
                'libnss3.dylib',
                'libnssckbi.dylib',
                'libnssdbm3.dylib',
                'libplugin_child_interpose.dylib',
                # 'libreplace_jemalloc.dylib',
                # 'libreplace_malloc.dylib',
                'libmozavutil.dylib',
                'libmozavcodec.dylib',
                'libsoftokn3.dylib',
                'pingsender',
                'plugin-container.app/Contents/MacOS/plugin-container',
                'updater.app/Contents/MacOS/org.mozilla.updater',
                # 'xpcshell',
                'XUL',
            ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = ('Contents/Resources', [
                'browser/components/libbrowsercomps.dylib',
                'dependentlibs.list',
                # 'firefox',
                'gmp-clearkey/0.1/libclearkey.dylib',
                # 'gmp-fake/1.0/libfake.dylib',
                # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                '**/interfaces.xpt',
            ])

            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                root, paths = paths_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', p)
                        writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)

        finally:
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact',
                    {'tempdir': tempdir},
                    'Unable to delete {tempdir}')
                pass
Example #8
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        try:
            self.log(logging.INFO, 'artifact',
                {'tempdir': tempdir},
                'Unpacking DMG into {tempdir}')
            mozinstall.install(filename, tempdir) # Doesn't handle already mounted DMG files nicely:

            # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"

            #   File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
            #     return artifacts.install_from(source, self.distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
            #     return self.install_from_hg(source, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
            #     return self.install_from_url(url, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
            #     return self.install_from_file(filename, distdir)
            #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
            #     mozinstall.install(filename, tempdir)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
            #     install_dir = _install_dmg(src, dest)
            #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
            #     subprocess.call('hdiutil detach %s -quiet' % appDir,

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = ('Contents/MacOS', [
                'crashreporter.app/Contents/MacOS/crashreporter',
                'firefox',
                'firefox-bin',
                'libfreebl3.dylib',
                'liblgpllibs.dylib',
                # 'liblogalloc.dylib',
                'libmozglue.dylib',
                'libnss3.dylib',
                'libnssckbi.dylib',
                'libnssdbm3.dylib',
                'libplugin_child_interpose.dylib',
                # 'libreplace_jemalloc.dylib',
                # 'libreplace_malloc.dylib',
                'libmozavutil.dylib',
                'libmozavcodec.dylib',
                'libsoftokn3.dylib',
                'plugin-container.app/Contents/MacOS/plugin-container',
                'updater.app/Contents/MacOS/org.mozilla.updater',
                # 'xpcshell',
                'XUL',
            ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = ('Contents/Resources', [
                'browser/components/libbrowsercomps.dylib',
                'dependentlibs.list',
                # 'firefox',
                'gmp-clearkey/0.1/libclearkey.dylib',
                # 'gmp-fake/1.0/libfake.dylib',
                # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                '**/interfaces.xpt',
            ])

            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                root, paths = paths_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', p)
                        writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)

        finally:
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact',
                    {'tempdir': tempdir},
                    'Unable to delete {tempdir}')
                pass
Example #9
def fat_aar(distdir,
            aars_paths,
            no_process=False,
            no_compatibility_check=False):
    if no_process:
        print('Not processing architecture-specific artifact Maven AARs.')
        return 0

    # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
    diffs = defaultdict(lambda: defaultdict(list))
    missing_arch_prefs = set()
    # Collect multi-architecture inputs to the fat AAR.
    copier = FileCopier()

    for arch, aar_path in aars_paths.items():
        # Map old non-architecture-specific path to new architecture-specific path.
        old_rewrite_map = {
            'greprefs.js':
            '{}/greprefs.js'.format(arch),
            'defaults/pref/geckoview-prefs.js':
            'defaults/pref/{}/geckoview-prefs.js'.format(arch),
        }

        # Architecture-specific preferences files.
        arch_prefs = set(old_rewrite_map.values())
        missing_arch_prefs |= set(arch_prefs)

        jar_finder = JarFinder(aar_path, JarReader(aar_path))
        for path, fileobj in UnpackFinder(jar_finder):
            # Native libraries go straight through.
            if mozpath.match(path, 'jni/**'):
                copier.add(path, fileobj)

            elif path in arch_prefs:
                copier.add(path, fileobj)

            elif path in ('classes.jar', 'annotations.zip'):
                # annotations.zip differs due to timestamps, but the contents should not.

                # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
                # and it's not worth working around, so we use Python's zip functionality
                # instead.
                z = ZipFile(BytesIO(fileobj.open().read()))
                for r in z.namelist():
                    fingerprint = sha1(z.open(r).read()).hexdigest()
                    diffs['{}!/{}'.format(path, r)][fingerprint].append(arch)

            else:
                fingerprint = sha1(fileobj.open().read()).hexdigest()
                # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
                # since in practice they will never overlap.
                diffs[path][fingerprint].append(arch)

            missing_arch_prefs.discard(path)

    # Some differences are allowed across the architecture-specific AARs.  We could allow-list
    # the actual content, but it's not necessary right now.
    allow_pattern_list = {
        'AndroidManifest.xml',  # Min SDK version is different for 32- and 64-bit builds.
        'classes.jar!/org/mozilla/gecko/util/HardwareUtils.class',  # Min SDK as well.
        'classes.jar!/org/mozilla/geckoview/BuildConfig.class',
        # Each input captures its CPU architecture.
        'chrome/toolkit/content/global/buildconfig.html',
        # Bug 1556162: localized resources are not deterministic across
        # per-architecture builds triggered from the same push.
        '**/*.ftl',
        '**/*.dtd',
        '**/*.properties',
    }

    not_allowed = OrderedDict()

    def format_diffs(ds):
        # Like '  armeabi-v7a, arm64-v8a -> XXX\n  x86, x86_64 -> YYY'.
        return '\n'.join(
            sorted('  {archs} -> {fingerprint}'.format(
                archs=', '.join(sorted(archs)), fingerprint=fingerprint)
                   for fingerprint, archs in ds.iteritems()))

    for p, ds in sorted(diffs.iteritems()):
        if len(ds) <= 1:
            # Only one hash across all inputs: roll on.
            continue

        if any(mozpath.match(p, pat) for pat in allow_pattern_list):
            print(
                'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
                .format(path=p, ds_repr=format_diffs(ds)))
            continue

        not_allowed[p] = ds

    for p, ds in not_allowed.iteritems():
        print(
            'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
            .format(path=p, ds_repr=format_diffs(ds)))

    for missing in sorted(missing_arch_prefs):
        print(
            'Disallowed: Inputs missing expected architecture-specific input: {missing}'
            .format(missing=missing))

    if not no_compatibility_check and (missing_arch_prefs or not_allowed):
        return 1

    output_dir = mozpath.join(distdir, 'output')
    copier.copy(output_dir)

    return 0
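
The diffs bookkeeping in Example #9 is worth calling out: it is a two-level defaultdict keyed first by archive path and then by SHA-1 fingerprint, with the value being the list of architectures that produced that fingerprint. A path whose inner dict holds more than one fingerprint therefore differs between the per-architecture AARs. A self-contained sketch with made-up input data standing in for the AAR contents:

from collections import defaultdict
from hashlib import sha1

# Map {path: {fingerprint: [arch1, arch2, ...]}}, as in fat_aar above.
diffs = defaultdict(lambda: defaultdict(list))

# Hypothetical per-architecture contents, not real AAR data.
contents = {
    'armeabi-v7a': {'classes.jar!/BuildConfig.class': b'arm bytes'},
    'x86_64': {'classes.jar!/BuildConfig.class': b'x86 bytes'},
}

for arch, files in contents.items():
    for path, data in files.items():
        diffs[path][sha1(data).hexdigest()].append(arch)

for path, by_fingerprint in diffs.items():
    if len(by_fingerprint) > 1:
        print('{} differs across architectures'.format(path))
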
Example #10
    def process_package_artifact(self, filename, processed_filename):
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(logging.INFO, 'artifact',
                {'tempdir': tempdir},
                'Unpacking DMG into {tempdir}')
            if self._substs['HOST_OS_ARCH'] == 'Linux':
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, 'wb') as devnull:
                    subprocess.check_call([
                        self._substs['DMG_TOOL'],
                        'extract',
                        filename,
                        'extracted_img',
                    ], stdout=devnull)
                    subprocess.check_call([
                        self._substs['HFS_TOOL'],
                        'extracted_img',
                        'extractall'
                    ], stdout=devnull)
            else:
                mozinstall.install(filename, tempdir)

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = ('Contents/MacOS', [
                'crashreporter.app/Contents/MacOS/crashreporter',
                'firefox',
                'firefox-bin',
                'libfreebl3.dylib',
                'liblgpllibs.dylib',
                # 'liblogalloc.dylib',
                'libmozglue.dylib',
                'libnss3.dylib',
                'libnssckbi.dylib',
                'libnssdbm3.dylib',
                'libplugin_child_interpose.dylib',
                # 'libreplace_jemalloc.dylib',
                # 'libreplace_malloc.dylib',
                'libmozavutil.dylib',
                'libmozavcodec.dylib',
                'libsoftokn3.dylib',
                'pingsender',
                'plugin-container.app/Contents/MacOS/plugin-container',
                'updater.app/Contents/MacOS/org.mozilla.updater',
                # 'xpcshell',
                'XUL',
            ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = [
                ('Contents/MacOS', [
                    'crashreporter.app/Contents/MacOS/minidump-analyzer',
                ]),
                ('Contents/Resources', [
                    'browser/components/libbrowsercomps.dylib',
                    'dependentlibs.list',
                    # 'firefox',
                    'gmp-clearkey/0.1/libclearkey.dylib',
                    # 'gmp-fake/1.0/libfake.dylib',
                    # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    '**/interfaces.xpt',
                ]),
            ]

            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                for root, paths in paths_keep_path:
                    finder = UnpackFinder(mozpath.join(source, root))
                    for path in paths:
                        for p, f in finder.find(path):
                            self.log(logging.INFO, 'artifact',
                                     {'path': p},
                                     'Adding {path} to processed archive')
                            destpath = mozpath.join('bin', p)
                            writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)

        finally:
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact',
                    {'tempdir': tempdir},
                    'Unable to delete {tempdir}')
                pass
Example #11
def repack(source, l10n):
    finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    copier = FileCopier()
    if finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif finder.kind == 'jar':
        formatter = JarFormatter(copier, optimize=finder.optimizedjars)
    elif finder.kind == 'omni':
        formatter = OmniJarFormatter(copier, finder.omnijar,
                                     optimize=finder.optimizedjars)

    # Read all manifest entries from the packaged directory.
    manifests = dict((p, m) for p, m in finder.find('**/*.manifest')
                     if is_manifest(p))
    assert all(isinstance(m, ManifestFile) for m in manifests.itervalues())
    entries = [e for m in manifests.itervalues() for e in m if e.localized]
    # Find unique locales used in these manifest entries.
    locales = list(set(e.id for e in entries if isinstance(e, ManifestLocale)))
    # Find all paths whose manifest are included by no other manifest.
    includes = set(mozpack.path.join(e.base, e.relpath)
                   for m in manifests.itervalues()
                   for e in m if isinstance(e, Manifest))
    bases = [mozpack.path.dirname(p) for p in set(manifests.keys()) - includes]

    # Read all manifest entries from the langpack directory.
    manifests = [m for p, m in l10n_finder.find('**/*.manifest')
                 if is_manifest(p)]
    assert all(isinstance(m, ManifestFile) for m in manifests)
    l10n_entries = [e for m in manifests for e in m if e.localized]
    # Find unique locales used in these manifest entries.
    l10n_locales = list(set(e.id for e in l10n_entries
                            if isinstance(e, ManifestLocale)))

    # The code further below assumes there's only one locale replaced with
    # another one.
    if len(locales) > 1 or len(l10n_locales) > 1:
        errors.fatal("Multiple locales aren't supported")
    locale = locales[0]
    l10n_locale = l10n_locales[0]

    # For each base directory, store what path a locale chrome package name
    # corresponds to.
    # e.g., for the following entry under app/chrome:
    #     locale foo en-US path/to/files
    # keep track that the locale path for foo in app is
    # app/chrome/path/to/files.
    l10n_paths = {}
    for e in l10n_entries:
        if isinstance(e, ManifestChrome):
            base = mozpack.path.basedir(e.path, bases)
            if not base in l10n_paths:
                l10n_paths[base] = {}
            l10n_paths[base][e.name] = e.path

    # For chrome and non chrome files or directories, store what langpack path
    # corresponds to a package path.
    paths = dict((e.path,
                  l10n_paths[mozpack.path.basedir(e.path, bases)][e.name])
                 for e in entries if isinstance(e, ManifestEntryWithRelPath))

    for path in NON_CHROME:
        for p, f in l10n_finder.find(path):
            paths[p] = p

    # Create a new package, with non localized bits coming from the original
    # package, and localized bits coming from the langpack.
    packager = SimplePackager(formatter)
    for p, f in finder:
        if is_manifest(p):
            # Remove localized manifest entries.
            for e in [e for e in f if e.localized]:
                f.remove(e)
        base = mozpack.path.basedir(p, paths.keys())
        if base:
            # If the path is one that needs a locale replacement, use the
            # corresponding file from the langpack.
            subpath = mozpack.path.relpath(p, base)
            path = mozpack.path.normpath(mozpack.path.join(paths[base],
                                                           subpath))
            files = [f for p, f in l10n_finder.find(path)]
            if len(files) == 0 and base in NON_CHROME:
                path = path.replace(locale, l10n_locale)
                files = [f for p, f in l10n_finder.find(path)]
            if len(files) == 0:
                if not base in NON_CHROME:
                    errors.error("Missing file: %s" % os.path.join(l10n, path))
            else:
                packager.add(path, files[0])
        else:
            packager.add(p, f)

    # Add localized manifest entries from the langpack.
    l10n_manifests = []
    for base in set(e.base for e in l10n_entries):
        m = ManifestFile(base)
        for e in l10n_entries:
            if e.base == base:
                m.add(e)
        path = mozpack.path.join(base, 'chrome.%s.manifest' % l10n_locale)
        l10n_manifests.append((path, m))
    bases = packager.get_bases()
    for path, m in l10n_manifests:
        base = mozpack.path.basedir(path, bases)
        packager.add(path, m)
        # Add a "manifest $path" entry in the top manifest under that base.
        m = ManifestFile(base)
        m.add(Manifest(base, mozpack.path.relpath(path, base)))
        packager.add(mozpack.path.join(base, 'chrome.manifest'), m)

    packager.close()

    # Add any remaining non chrome files.
    for base in NON_CHROME:
        for p, f in l10n_finder.find(base):
            if not formatter.contains(p):
                formatter.add(p, f)

    # Transplant jar preloading information.
    for path, log in finder.jarlogs.iteritems():
        assert isinstance(copier[path], Jarrer)
        copier[path].preload([l.replace(locale, l10n_locale) for l in log])

    copier.copy(source)
    generate_precomplete(source)
Example #12
File: msix.py  Project: Floflis/gecko-b2g
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=[],
    locale_allowlist=set(),
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly?  Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )
    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)
    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds.
    # The nested langpack XPI files can't be read by `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to give us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that we
    # can't easily evaluate completely here.  Instead, we take the last value
    # from `configure.sh`.
    lines = [
        line
        for line in open(mozpath.join(branding, "configure.sh")).readlines()
        if "MOZ_IGECKOBACKCHANNEL_IID" in line
    ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding.  Differentiate Beta a little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
    # Package Root'.  This is `BrandFullName` in the installer, and we want to
    # be close but to not match.  By not matching, we hope to prevent confusion
    # and/or errors between regularly installed builds and App Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here.  I.e.,
    # "__{publisherID}".  My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
    # missing field, but it's not necessary, since this is just an output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs. This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)

            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts fetched from dependent
        # tasks.  In particular, langpacks will be named like `target.langpack.xpi`.  The fetch
        # tasks do allow us to put them in a per-locale directory, so that the entire set can be
        # fetched.  Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE".  This is how langpacks
                # are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/[email protected]".  This is what AMO
                # serves and how flatpak builds name langpacks, but not how snap builds name
                # langpacks.  I can't explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/[email protected]" into "LOCALE".  This is
                # how langpacks are presented from an unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales: see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, which is
    # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales.  But we
    # only advertise support in the App manifest for the intersection of that set and the set of
    # supported locales.
    #
    # We distribute all langpacks to avoid the following issue.  Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package.  MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update.  But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f'    <Resource Language="{locale}" />' for locale in sorted(locales)
    )

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
        # Beta Package Root'.  See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
            log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
    # supports only hyphen style.  `makeappx.exe` lets us overwrite the output and
    # gives more feedback, so we prefer invoking it with those flags.  This will
    # also accommodate `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
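
Example #12 pulls several values (brandFullName, MOZ_IGECKOBACKCHANNEL_IID) out of KEY=VALUE style lines with str.partition("="), then strips whitespace and optional surrounding quotes. A small standalone sketch of that parsing step, with a hypothetical value_after_equals helper and sample lines that are not from the source:

def value_after_equals(line):
    """Extract the right-hand side of a KEY=VALUE line, dropping optional quotes."""
    _, _, value = line.partition("=")
    value = value.strip()
    # Drop matching surrounding quotes if present.
    if value.startswith(('"', "'")) and value.endswith(value[0]):
        value = value[1:-1]
    return value

# Hypothetical lines resembling brand.properties / configure.sh entries.
print(value_after_equals("brandFullName=Mozilla Firefox"))
print(value_after_equals('MOZ_IGECKOBACKCHANNEL_IID="{00000000-0000-0000-0000-000000000000}"'))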