def precompile_cache(formatter, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - formatter is a Formatter instance to which the startup cache is added.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache entries for all resources under resource://app/ are
    generated, except when gre_path == app_path, in which case they are
    under resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    fd, cache = mkstemp('.zip')
    os.close(fd)
    os.remove(cache)

    # For VC12, make sure we can find the right bitness of pgort120.dll
    env = os.environ.copy()
    if 'VS120COMNTOOLS' in env and not buildconfig.substs['HAVE_64BIT_OS']:
        vc12dir = os.path.abspath(os.path.join(env['VS120COMNTOOLS'],
                                               '../../VC/bin'))
        if os.path.exists(vc12dir):
            env['PATH'] = vc12dir + ';' + env['PATH']

    try:
        if launcher.launch(['xpcshell', '-g', gre_path, '-a', app_path,
                            '-f', os.path.join(os.path.dirname(__file__),
                                               'precompile_cache.js'),
                            '-e', 'precompile_startupcache("resource://%s/");'
                                  % resource],
                           extra_linker_path=gre_path,
                           extra_env={'MOZ_STARTUP_CACHE': cache,
                                      'PATH': env['PATH']}):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        for f in jar:
            if resource in f.filename:
                path = f.filename[f.filename.index(resource) + len(resource):]
                if formatter.contains(path):
                    formatter.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        if os.path.exists(cache):
            os.remove(cache)
def precompile_cache(registry, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - registry is a FileRegistry-like instance to which the startup cache
      is added.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache entries for all resources under resource://app/ are
    generated, except when gre_path == app_path, in which case they are
    under resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    fd, cache = mkstemp('.zip')
    if os.name == 'os2':
        # NS_NewLocalFile is strict about slashes
        cache = cache.replace('/', '\\')
    os.close(fd)
    os.remove(cache)

    try:
        extra_env = {'MOZ_STARTUP_CACHE': cache}
        if buildconfig.substs.get('MOZ_TSAN'):
            extra_env['TSAN_OPTIONS'] = 'report_bugs=0'
        if buildconfig.substs.get('MOZ_ASAN'):
            extra_env['ASAN_OPTIONS'] = 'detect_leaks=0'
        if launcher.launch(['xpcshell', '-g', gre_path, '-a', app_path,
                            '-f', os.path.join(os.path.dirname(__file__),
                                               'precompile_cache.js'),
                            '-e', 'precompile_startupcache("resource://%s/");'
                                  % resource],
                           extra_linker_path=gre_path,
                           extra_env=extra_env):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        for f in jar:
            if resource in f.filename:
                path = f.filename[f.filename.index(resource) + len(resource):]
                if registry.contains(path):
                    registry.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        if os.path.exists(cache):
            os.remove(cache)
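# A minimal usage sketch for the registry-based variant above. The paths are
# hypothetical, and the sketch assumes the registry would normally already be
# populated by the packager; FileRegistry (from mozpack.copier) provides the
# contains()/add() interface the function relies on.
from mozpack.copier import FileRegistry
from mozpack.files import FileFinder

registry = FileRegistry()
for path, f in FileFinder('/path/to/dist/bin').find('**'):
    registry.add(path, f)  # Mirror what the packager would have added.

# Generates startup cache entries for resource://app/ (or resource://gre/
# when gre_path and app_path coincide) and adds them back into the registry.
precompile_cache(registry, '/path/to/dist', 'bin', 'bin/browser')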
def precompile_cache(formatter, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - formatter is a Formatter instance to which the startup cache is added.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache entries for all resources under resource://app/ are
    generated, except when gre_path == app_path, in which case they are
    under resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    fd, cache = mkstemp('.zip')
    os.close(fd)
    os.remove(cache)

    try:
        if launcher.launch(['xpcshell', '-g', gre_path, '-a', app_path,
                            '-f', os.path.join(os.path.dirname(__file__),
                                               'precompile_cache.js'),
                            '-e', 'precompile_startupcache("resource://%s/");'
                                  % resource],
                           extra_linker_path=gre_path,
                           extra_env={'MOZ_STARTUP_CACHE': cache}):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        for f in jar:
            if resource in f.filename:
                path = f.filename[f.filename.index(resource) + len(resource):]
                if formatter.contains(path):
                    formatter.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        if os.path.exists(cache):
            os.remove(cache)
def test_preload(self):
    s = MockDest()
    with JarWriter(fileobj=s) as jar:
        jar.add('foo', 'foo')
        jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')

    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, None)

    with JarWriter(fileobj=s) as jar:
        jar.add('foo', 'foo')
        jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.preload(['baz/qux', 'bar'])

    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, 'bar')
    files = [j for j in jar]

    self.assertEqual(files[0].filename, 'baz/qux')
    self.assertEqual(files[1].filename, 'bar')
    self.assertEqual(files[2].filename, 'foo')
def process_artifact(self, filename, processed_filename):
    # Extract all .so files into the root, which will get copied into
    # dist/bin.
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for f in JarReader(filename):
            if not f.filename.endswith('.so') and \
                    f.filename not in ('platform.ini', 'application.ini'):
                continue

            basename = os.path.basename(f.filename)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')

            basename = mozpath.join('bin', basename)
            writer.add(basename.encode('utf-8'), f)
def process_package_artifact(self, filename, processed_filename):
    # Extract all .so files into the root, which will get copied into
    # dist/bin.
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
            if not any(mozpath.match(p, pat)
                       for pat in self.package_artifact_patterns):
                continue

            dirname, basename = os.path.split(p)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')

            basedir = 'bin'
            if not basename.endswith('.so'):
                # Note: str.lstrip('assets/') would strip any leading run of
                # the characters 'a', 's', 'e', 't' and '/', not the
                # 'assets/' prefix, so remove the prefix explicitly.
                if dirname.startswith('assets/'):
                    dirname = dirname[len('assets/'):]
                basedir = mozpath.join('bin', dirname)
            basename = mozpath.join(basedir, basename)
            writer.add(basename.encode('utf-8'), f.open())
def process_tests_artifact(self, filename, processed_filename):
    added_entry = False

    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        reader = JarReader(filename)
        for filename, entry in reader.entries.iteritems():
            for pattern, (src_prefix, dest_prefix) in \
                    self.test_artifact_patterns:
                if not mozpath.match(filename, pattern):
                    continue
                destpath = mozpath.relpath(filename, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                mode = entry['external_attr'] >> 16
                writer.add(destpath.encode('utf-8'), reader[filename],
                           mode=mode)
                added_entry = True
                break

            for files_entry in OBJDIR_TEST_FILES.values():
                origin_pattern = files_entry['pattern']
                leaf_filename = filename
                if 'dest' in files_entry:
                    dest = files_entry['dest']
                    origin_pattern = mozpath.join(dest, origin_pattern)
                    leaf_filename = filename[len(dest) + 1:]
                if mozpath.match(filename, origin_pattern):
                    destpath = mozpath.join('..', files_entry['base'],
                                            leaf_filename)
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[filename],
                               mode=mode)

    if not added_entry:
        raise ValueError(
            'Archive format changed! No pattern from "{patterns}" '
            'matched an archive path.'.format(
                patterns=LinuxArtifactJob.test_artifact_patterns))
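# The `entry['external_attr'] >> 16` above recovers Unix permission bits: for
# archives created on Unix hosts, zip stores st_mode in the high 16 bits of
# the external attributes field. A small self-contained illustration using
# only the standard library (the archive name and member are hypothetical):
import zipfile

def unix_mode(zip_path, member):
    # The high 16 bits of external_attr hold the st_mode of the original file.
    info = zipfile.ZipFile(zip_path).getinfo(member)
    return info.external_attr >> 16

# oct(unix_mode('target.common.tests.zip', 'bin/xpcshell') & 0o777)
# would render something like '0o755' for an executable entry.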
def process_package_artifact(self, filename, processed_filename):
    added_entry = False

    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
            if not any(mozpath.match(p, pat)
                       for pat in self.package_artifact_patterns):
                continue

            # Strip off the relative "firefox/" bit from the path.
            basename = mozpath.relpath(p, "firefox")
            basename = mozpath.join('bin', basename)
            self.log(logging.INFO, 'artifact',
                     {'basename': basename},
                     'Adding {basename} to processed archive')
            writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
            added_entry = True

    if not added_entry:
        raise ValueError(
            'Archive format changed! No pattern from "{patterns}" '
            'matched an archive path.'.format(
                patterns=self.artifact_patterns))
def test_deflated_file(self):
    '''
    Check that DeflatedFile.copy yields the proper content in the
    destination file in all situations that trigger different code paths
    (see TestFile.test_file)
    '''
    src = self.tmppath('src.jar')
    dest = self.tmppath('dest')

    contents = {}
    with JarWriter(src) as jar:
        for content in samples:
            name = ''.join(random.choice(string.letters)
                           for i in xrange(8))
            jar.add(name, content, compress=True)
            contents[name] = content

    for j in JarReader(src):
        f = DeflatedFile(j)
        f.copy(dest)
        self.assertEqual(contents[j.filename], open(dest, 'rb').read())
def test_deflated_file(self):
    """
    Check that DeflatedFile.copy yields the proper content in the
    destination file in all situations that trigger different code paths
    (see TestFile.test_file)
    """
    src = self.tmppath("src.jar")
    dest = self.tmppath("dest")

    contents = {}
    with JarWriter(src) as jar:
        for content in samples:
            name = "".join(
                random.choice(
                    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
                for i in range(8)
            )
            jar.add(name, content, compress=True)
            contents[name] = content

    for j in JarReader(src):
        f = DeflatedFile(j)
        f.copy(dest)
        self.assertEqual(contents[j.filename], open(dest, "rb").read())
def test_jarrer(self):
    copier = Jarrer()
    copier.add("foo/bar", GeneratedFile(b"foobar"))
    copier.add("foo/qux", GeneratedFile(b"fooqux"))
    copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
    copier.add("bar", GeneratedFile(b"bar"))
    copier.add("qux/foo", GeneratedFile(b"quxfoo"))
    copier.add("qux/bar", GeneratedFile(b""))

    dest = MockDest()
    copier.copy(dest)
    self.check_jar(dest, copier)

    copier.remove("foo")
    copier.add("test", GeneratedFile(b"test"))
    copier.copy(dest)
    self.check_jar(dest, copier)

    copier.remove("test")
    copier.add("test", GeneratedFile(b"replaced-content"))
    copier.copy(dest)
    self.check_jar(dest, copier)

    copier.copy(dest)
    self.check_jar(dest, copier)

    preloaded = ["qux/bar", "bar"]
    copier.preload(preloaded)
    copier.copy(dest)

    dest.seek(0)
    jar = JarReader(fileobj=dest)
    self.assertEqual(
        [f.filename for f in jar],
        preloaded + [p for p in copier.paths() if p not in preloaded],
    )
    self.assertEqual(jar.last_preloaded, preloaded[-1])
def test_jar(self):
    s = MockDest()
    with JarWriter(fileobj=s) as jar:
        jar.add('foo', 'foo')
        self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
        jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
        jar.add('baz\\backslash', 'aaaaaaaaaaaaaaa')

    files = [j for j in JarReader(fileobj=s)]

    self.assertEqual(files[0].filename, 'foo')
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), 'foo')

    self.assertEqual(files[1].filename, 'bar')
    self.assertTrue(files[1].compressed)
    self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertEqual(files[2].filename, 'baz/qux')
    self.assertFalse(files[2].compressed)
    self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    if os.sep == '\\':
        self.assertEqual(files[3].filename, 'baz/backslash',
                         'backslashes in filenames on Windows should get normalized')
    else:
        self.assertEqual(files[3].filename, 'baz\\backslash',
                         'backslashes in filenames on POSIX platform are untouched')

    s = MockDest()
    with JarWriter(fileobj=s, compress=False) as jar:
        jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.add('foo', 'foo')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)

    jar = JarReader(fileobj=s)
    files = [j for j in jar]

    self.assertEqual(files[0].filename, 'bar')
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertEqual(files[1].filename, 'foo')
    self.assertFalse(files[1].compressed)
    self.assertEqual(files[1].read(), 'foo')

    self.assertEqual(files[2].filename, 'baz/qux')
    self.assertTrue(files[2].compressed)
    self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertTrue('bar' in jar)
    self.assertTrue('foo' in jar)
    self.assertFalse('baz' in jar)
    self.assertTrue('baz/qux' in jar)

    self.assertTrue(jar['bar'], files[1])
    self.assertTrue(jar['foo'], files[0])
    self.assertTrue(jar['baz/qux'], files[2])

    s.seek(0)
    jar = JarReader(fileobj=s)
    self.assertTrue('bar' in jar)
    self.assertTrue('foo' in jar)
    self.assertFalse('baz' in jar)
    self.assertTrue('baz/qux' in jar)

    files[0].seek(0)
    self.assertEqual(jar['bar'].filename, files[0].filename)
    self.assertEqual(jar['bar'].compressed, files[0].compressed)
    self.assertEqual(jar['bar'].read(), files[0].read())

    files[1].seek(0)
    self.assertEqual(jar['foo'].filename, files[1].filename)
    self.assertEqual(jar['foo'].compressed, files[1].compressed)
    self.assertEqual(jar['foo'].read(), files[1].read())

    files[2].seek(0)
    self.assertEqual(jar['baz/qux'].filename, files[2].filename)
    self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
    self.assertEqual(jar['baz/qux'].read(), files[2].read())
def test_jar(self):
    s = MockDest()
    with JarWriter(fileobj=s, optimize=self.optimize) as jar:
        jar.add('foo', 'foo')
        self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
        jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)

    files = [j for j in JarReader(fileobj=s)]

    self.assertEqual(files[0].filename, 'foo')
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), 'foo')

    self.assertEqual(files[1].filename, 'bar')
    self.assertTrue(files[1].compressed)
    self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertEqual(files[2].filename, 'baz/qux')
    self.assertFalse(files[2].compressed)
    self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    s = MockDest()
    with JarWriter(fileobj=s, compress=False,
                   optimize=self.optimize) as jar:
        jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.add('foo', 'foo')
        jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)

    jar = JarReader(fileobj=s)
    files = [j for j in jar]

    self.assertEqual(files[0].filename, 'bar')
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertEqual(files[1].filename, 'foo')
    self.assertFalse(files[1].compressed)
    self.assertEqual(files[1].read(), 'foo')

    self.assertEqual(files[2].filename, 'baz/qux')
    self.assertTrue(files[2].compressed)
    self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')

    self.assertTrue('bar' in jar)
    self.assertTrue('foo' in jar)
    self.assertFalse('baz' in jar)
    self.assertTrue('baz/qux' in jar)

    self.assertTrue(jar['bar'], files[1])
    self.assertTrue(jar['foo'], files[0])
    self.assertTrue(jar['baz/qux'], files[2])

    s.seek(0)
    jar = JarReader(fileobj=s)
    self.assertTrue('bar' in jar)
    self.assertTrue('foo' in jar)
    self.assertFalse('baz' in jar)
    self.assertTrue('baz/qux' in jar)

    files[0].seek(0)
    self.assertEqual(jar['bar'].filename, files[0].filename)
    self.assertEqual(jar['bar'].compressed, files[0].compressed)
    self.assertEqual(jar['bar'].read(), files[0].read())

    files[1].seek(0)
    self.assertEqual(jar['foo'].filename, files[1].filename)
    self.assertEqual(jar['foo'].compressed, files[1].compressed)
    self.assertEqual(jar['foo'].read(), files[1].read())

    files[2].seek(0)
    self.assertEqual(jar['baz/qux'].filename, files[2].filename)
    self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
    self.assertEqual(jar['baz/qux'].read(), files[2].read())
def fat_aar(distdir, aars_paths, no_process=False, no_compatibility_check=False):
    if no_process:
        print('Not processing architecture-specific artifact Maven AARs.')
        return 0

    # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
    diffs = defaultdict(lambda: defaultdict(list))
    missing_arch_prefs = set()
    # Collect multi-architecture inputs to the fat AAR.
    copier = FileCopier()

    for arch, aar_path in aars_paths.items():
        # Map old non-architecture-specific path to new architecture-specific path.
        old_rewrite_map = {
            'greprefs.js': '{}/greprefs.js'.format(arch),
            'defaults/pref/geckoview-prefs.js':
                'defaults/pref/{}/geckoview-prefs.js'.format(arch),
        }

        # Architecture-specific preferences files.
        arch_prefs = set(old_rewrite_map.values())
        missing_arch_prefs |= set(arch_prefs)

        jar_finder = JarFinder(aar_path, JarReader(aar_path))
        for path, fileobj in UnpackFinder(jar_finder):
            # Native libraries go straight through.
            if mozpath.match(path, 'jni/**'):
                copier.add(path, fileobj)

            elif path in arch_prefs:
                copier.add(path, fileobj)

            elif path in ('classes.jar', 'annotations.zip'):
                # annotations.zip differs due to timestamps, but the contents
                # should not.  `JarReader` fails on the non-standard
                # `classes.jar` produced by Gradle/aapt, and it's not worth
                # working around, so we use Python's zip functionality instead.
                z = ZipFile(BytesIO(fileobj.open().read()))
                for r in z.namelist():
                    fingerprint = sha1(z.open(r).read()).hexdigest()
                    diffs['{}!/{}'.format(path, r)][fingerprint].append(arch)

            else:
                fingerprint = sha1(fileobj.open().read()).hexdigest()
                # There's no need to distinguish `target.maven.zip` from
                # `assets/omni.ja` here, since in practice they will never
                # overlap.
                diffs[path][fingerprint].append(arch)
                missing_arch_prefs.discard(path)

    # Some differences are allowed across the architecture-specific AARs.  We
    # could allow-list the actual content, but it's not necessary right now.
    allow_pattern_list = {
        'AndroidManifest.xml',  # Min SDK version is different for 32- and 64-bit builds.
        'classes.jar!/org/mozilla/gecko/util/HardwareUtils.class',  # Min SDK as well.
        'classes.jar!/org/mozilla/geckoview/BuildConfig.class',  # Each input captures its CPU architecture.
        'chrome/toolkit/content/global/buildconfig.html',
        # Bug 1556162: localized resources are not deterministic across
        # per-architecture builds triggered from the same push.
        '**/*.ftl',
        '**/*.dtd',
        '**/*.properties',
    }

    not_allowed = OrderedDict()

    def format_diffs(ds):
        # Like ' armeabi-v7a, arm64-v8a -> XXX\n x86, x86_64 -> YYY'.
        return '\n'.join(
            sorted(' {archs} -> {fingerprint}'.format(
                archs=', '.join(sorted(archs)), fingerprint=fingerprint)
                for fingerprint, archs in ds.items()))

    for p, ds in sorted(diffs.items()):
        if len(ds) <= 1:
            # Only one hash across all inputs: roll on.
            continue
        if any(mozpath.match(p, pat) for pat in allow_pattern_list):
            print('Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
                  .format(path=p, ds_repr=format_diffs(ds)))
            continue
        not_allowed[p] = ds

    for p, ds in not_allowed.items():
        print('Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'
              .format(path=p, ds_repr=format_diffs(ds)))

    for missing in sorted(missing_arch_prefs):
        print('Disallowed: Inputs missing expected architecture-specific input: {missing}'
              .format(missing=missing))

    if not no_compatibility_check and (missing_arch_prefs or not_allowed):
        return 1

    output_dir = mozpath.join(distdir, 'output')
    copier.copy(output_dir)

    return 0
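# A sketch of how fat_aar might be invoked (hypothetical paths; the real
# per-architecture AAR locations come from the build system):
aars = {
    'armeabi-v7a': '/builds/arm/geckoview.aar',
    'arm64-v8a': '/builds/arm64/geckoview.aar',
    'x86': '/builds/x86/geckoview.aar',
    'x86_64': '/builds/x86_64/geckoview.aar',
}
# Returns 0 on success; 1 when disallowed architecture-specific differences
# or missing architecture-specific preferences files are detected.
exit_code = fat_aar('/builds/dist', aars)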
def check_jar(self, dest, copier):
    jar = JarReader(fileobj=dest)
    self.assertEqual([f.filename for f in jar], copier.paths())
    for f in jar:
        self.assertEqual(f.uncompressed_data.read(),
                         copier[f.filename].content)
def copy(self, dest, skip_if_older=True):
    '''
    Pack all registered files in the given destination jar. The given
    destination jar may be a path to a jar file, or a Dest instance for
    a jar file.
    If the destination jar file exists, its (compressed) contents are
    used instead of the registered BaseFile instances when appropriate.
    '''
    class DeflaterDest(Dest):
        '''
        Dest-like class, reading from a file-like object initially, but
        switching to a Deflater object if written to.

            dest = DeflaterDest(original_file)
            dest.read()        # Reads original_file
            dest.write(data)   # Creates a Deflater and writes data there
            dest.read()        # Re-opens the Deflater and reads from it
        '''
        def __init__(self, orig=None, compress=True):
            self.mode = None
            self.deflater = orig
            self.compress = compress

        def read(self, length=-1):
            if self.mode != 'r':
                assert self.mode is None
                self.mode = 'r'
            return self.deflater.read(length)

        def write(self, data):
            if self.mode != 'w':
                from mozpack.mozjar import Deflater
                self.deflater = Deflater(self.compress)
                self.mode = 'w'
            self.deflater.write(data)

        def exists(self):
            return self.deflater is not None

    if isinstance(dest, basestring):
        dest = Dest(dest)
    assert isinstance(dest, Dest)

    from mozpack.mozjar import JarWriter, JarReader
    try:
        old_jar = JarReader(fileobj=dest)
    except Exception:
        old_jar = []

    old_contents = dict([(f.filename, f) for f in old_jar])

    with JarWriter(fileobj=dest, compress=self.compress,
                   optimize=self.optimize) as jar:
        for path, file in self:
            compress = self._compress_options.get(path, self.compress)
            if path in old_contents:
                deflater = DeflaterDest(old_contents[path], compress)
            else:
                deflater = DeflaterDest(compress=compress)
            file.copy(deflater, skip_if_older)
            jar.add(path, deflater.deflater, mode=file.mode,
                    compress=compress)
        if self._preload:
            jar.preload(self._preload)
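# A minimal sketch of the incremental behavior the docstring describes:
# copying the same Jarrer to an existing jar lets the second copy() reuse
# already-deflated content from the old jar rather than recompressing files
# whose content has not changed. (The destination path is hypothetical.)
from mozpack.copier import Jarrer
from mozpack.files import GeneratedFile

jarrer = Jarrer()
jarrer.add('chrome/content/a.js', GeneratedFile(b'alert(1);'))
jarrer.add('chrome/content/b.js', GeneratedFile(b'alert(2);'))
jarrer.copy('/tmp/example.jar')  # First copy deflates everything.
jarrer.copy('/tmp/example.jar')  # Second copy can reuse old deflated data.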
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=[],
    locale_allowlist=set(),
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly?  Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )

    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)

    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar
    # in packaged builds.  The nested langpack XPI files can't be read by
    # `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to give us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that
    # we can't easily evaluate completely here.  Instead, we take the last
    # value from `configure.sh`.
    lines = [
        line
        for line in open(mozpath.join(branding, "configure.sh")).readlines()
        if "MOZ_IGECKOBACKCHANNEL_IID" in line
    ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding.  Differentiate Beta a
        # little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
    # Beta Package Root'.  This is `BrandFullName` in the installer, and we
    # want to be close but to not match.  By not matching, we hope to prevent
    # confusion and/or errors between regularly installed builds and App
    # Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here.  I.e.,
    # "__{publisherID}".  My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting
    # also a missing field, but it's not necessary, since this is just an
    # output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want
            # just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like
            # "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs.  This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)
            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts
        # fetched from dependent tasks.  In particular, langpacks will be
        # named like `target.langpack.xpi`.  The fetch tasks do allow us to
        # put them in a per-locale directory, so that the entire set can be
        # fetched.  Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE".
                # This is how langpacks are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/langpack-LOCALE@firefox.mozilla.org.xpi".
                # This is what AMO serves and how flatpak builds name
                # langpacks, but not how snap builds name langpacks.  I can't
                # explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/langpack-LOCALE@firefox.mozilla.org.xpi" into
                # "LOCALE".  This is how langpacks are presented from an
                # unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales: see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, which is
    # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales.  But we
    # only advertise support in the App manifest for the intersection of that set and the set of
    # supported locales.
    #
    # We distribute all langpacks to avoid the following issue.  Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package.  MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update.  But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f' <Resource Language="{locale}" />' for locale in sorted(locales)
    )

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root',
        # 'Firefox Beta Package Root'.  See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
            log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
    # supports only hyphen style.  `makeappx.exe` allows overwriting and
    # provides more feedback, so we prefer invoking with these flags.  This
    # will also accommodate `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
def copy(self, dest, skip_if_older=True):
    '''
    Pack all registered files in the given destination jar. The given
    destination jar may be a path to a jar file, or a Dest instance for
    a jar file.
    If the destination jar file exists, its (compressed) contents are
    used instead of the registered BaseFile instances when appropriate.
    '''
    class DeflaterDest(Dest):
        '''
        Dest-like class, reading from a file-like object initially, but
        switching to a Deflater object if written to.

            dest = DeflaterDest(original_file)
            dest.read()        # Reads original_file
            dest.write(data)   # Creates a Deflater and writes data there
            dest.read()        # Re-opens the Deflater and reads from it
        '''
        def __init__(self, orig=None, compress=True):
            self.mode = None
            self.deflater = orig
            self.compress = compress

        def read(self, length=-1):
            if self.mode != 'r':
                assert self.mode is None
                self.mode = 'r'
            return self.deflater.read(length)

        def write(self, data):
            if self.mode != 'w':
                from mozpack.mozjar import Deflater
                self.deflater = Deflater(self.compress)
                self.mode = 'w'
            self.deflater.write(data)

        def exists(self):
            return self.deflater is not None

    if isinstance(dest, basestring):
        dest = Dest(dest)
    assert isinstance(dest, Dest)

    from mozpack.mozjar import JarWriter, JarReader, JAR_BROTLI
    try:
        old_jar = JarReader(fileobj=dest)
    except Exception:
        old_jar = []

    old_contents = dict([(f.filename, f) for f in old_jar])

    with JarWriter(fileobj=dest, compress=self.compress) as jar:
        for path, file in self:
            compress = self._compress_options.get(path, self.compress)
            # Temporary: Because l10n repacks can't handle brotli just yet,
            # but need to be able to decompress those files, per
            # UnpackFinder and formatters, we force deflate on them.
            if compress == JAR_BROTLI and (
                    isinstance(file, ManifestFile) or
                    mozpath.basename(path) == 'install.rdf'):
                compress = True

            # If the added content already comes from a jar file, we just add
            # the raw data from the original jar file to the new one.
            if isinstance(file, DeflatedFile):
                jar.add(path, file.file, mode=file.mode,
                        compress=file.file.compress)
                continue
            # If the file is already in the old contents for this jar,
            # we avoid compressing when the contents match, which requires
            # decompressing the old content. But for e.g. l10n repacks,
            # which can't decompress brotli, we skip this.
            elif path in old_contents and \
                    old_contents[path].compress != JAR_BROTLI:
                deflater = DeflaterDest(old_contents[path], compress)
            else:
                deflater = DeflaterDest(compress=compress)
            file.copy(deflater, skip_if_older)
            jar.add(path, deflater.deflater, mode=file.mode,
                    compress=compress)
        if self._preload:
            jar.preload(self._preload)
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None,
                       lib_dirs=[], assets_dirs=[], features_dirs=[],
                       root_files=[], verbose=False):
    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrite the
    # content of earlier files.
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir, find_executables=False)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if f.open().read(5)[1:] == '7zXZ':
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed
                    # libraries (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [buildconfig.substs.get('XZ'), '-zkf',
                           mozpath.join(finder.base, p)]

                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'

                    if bcj:
                        cmd.extend([bcj, '--lzma2'])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    os.rename(f.path + '.xz', f.path)
                    compress = False

            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja),
            compress=False)

    if classes_dex:
        add('classes.dex', File(classes_dex))

    return jarrer
def package_fennec_apk(inputs=[], omni_ja=None,
                       lib_dirs=[], assets_dirs=[], features_dirs=[],
                       root_files=[], verbose=False):
    jarrer = Jarrer()

    # First, take input files. The contents of the later files overwrite the
    # content of earlier files. Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input. This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename

            if mozpath.match(path, '/classes*.dex'):
                last_input_with_dex_files = input
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename

            if not mozpath.match(path, '/classes*.dex'):
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', p), f)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja),
            compress=False)

    return jarrer
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None,
                       lib_dirs=[], assets_dirs=[], features_dirs=[],
                       root_files=[], verbose=False):
    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrite the
    # content of earlier files. Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input. This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename

            if mozpath.match(path, '/classes*.dex'):
                last_input_with_dex_files = input
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename

            if not mozpath.match(path, '/classes*.dex'):
                continue

            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if f.open().read(5)[1:] == '7zXZ':
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed
                    # libraries (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [buildconfig.substs.get('XZ'), '-zkf',
                           mozpath.join(finder.base, p)]

                    # For now, the mozglue XZStream ELF loader can only
                    # support xz files with a single stream that contains a
                    # single block. In xz, there is no explicit option to set
                    # the max block count. Instead, we force xz to use single
                    # thread mode, which results in a single block.
                    cmd.extend(['--threads=1'])

                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'

                    if bcj:
                        cmd.extend([bcj])

                    # We need to explicitly specify the LZMA filter chain to
                    # ensure consistent builds across platforms. Note that the
                    # dict size must be less than 16MiB per the hardcoded
                    # value in mozglue/linker/XZStream.cpp. This is the
                    # default LZMA filter chain for xz-utils version 5.0.
                    # See:
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/lzma/lzma_encoder_presets.c
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/api/lzma/container.h#L31
                    cmd.extend([
                        '--lzma2=dict=8MiB,lc=3,lp=0,pb=2,mode=normal,nice=64,mf=bt4,depth=0'
                    ])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    os.rename(f.path + '.xz', f.path)
                    compress = False

            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja),
            compress=False)

    if classes_dex:
        if buildconfig.substs.get('MOZ_BUILD_MOBILE_ANDROID_WITH_GRADLE'):
            raise ValueError("Fennec APKs built --with-gradle "
                             "should never specify classes.dex")
        add('classes.dex', File(classes_dex))

    return jarrer
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None,
                       lib_dirs=[], assets_dirs=[],
                       szip_assets_libs_with=None, root_files=[],
                       verbose=False):
    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrite the
    # content of earlier files.
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir, find_executables=False)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if szip_assets_libs_with:
                    # We need to szip libraries before packing. The file
                    # returned by the finder is not yet opened. When it is
                    # opened, it will "see" the content updated by szip.
                    subprocess.check_output(
                        [szip_assets_libs_with, mozpath.join(finder.base, p)])
                if f.open().read(4) == 'SeZz':
                    # We need to store (rather than deflate) szipped libraries
                    # (even if we don't szip them ourselves).
                    compress = False
            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja),
            compress=False)

    if classes_dex:
        add('classes.dex', File(classes_dex))

    return jarrer
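# A minimal sketch of assembling an APK with the helpers above (hypothetical
# paths; only keyword arguments common to all of the variants shown here are
# used). The returned Jarrer is written out with copy().
apk = package_fennec_apk(
    inputs=['/builds/dist/gecko-unsigned-unaligned.apk'],
    omni_ja='/builds/dist/fennec/assets/omni.ja',
    lib_dirs=['/builds/dist/fennec/lib'],
    root_files=['/builds/dist/bin/package-name.txt'],
    verbose=True,
)
apk.copy('/builds/dist/fennec-unsigned.apk')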
def test_jar(self):
    s = MockDest()
    with JarWriter(fileobj=s) as jar:
        jar.add("foo", b"foo")
        self.assertRaises(JarWriterError, jar.add, "foo", b"bar")
        jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
        jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
        jar.add("baz\\backslash", b"aaaaaaaaaaaaaaa")

    files = [j for j in JarReader(fileobj=s)]

    self.assertEqual(files[0].filename, "foo")
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), b"foo")

    self.assertEqual(files[1].filename, "bar")
    self.assertTrue(files[1].compressed)
    self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

    self.assertEqual(files[2].filename, "baz/qux")
    self.assertFalse(files[2].compressed)
    self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

    if os.sep == "\\":
        self.assertEqual(
            files[3].filename,
            "baz/backslash",
            "backslashes in filenames on Windows should get normalized",
        )
    else:
        self.assertEqual(
            files[3].filename,
            "baz\\backslash",
            "backslashes in filenames on POSIX platform are untouched",
        )

    s = MockDest()
    with JarWriter(fileobj=s, compress=False) as jar:
        jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
        jar.add("foo", b"foo")
        jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", True)

    jar = JarReader(fileobj=s)
    files = [j for j in jar]

    self.assertEqual(files[0].filename, "bar")
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

    self.assertEqual(files[1].filename, "foo")
    self.assertFalse(files[1].compressed)
    self.assertEqual(files[1].read(), b"foo")

    self.assertEqual(files[2].filename, "baz/qux")
    self.assertTrue(files[2].compressed)
    self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

    self.assertTrue("bar" in jar)
    self.assertTrue("foo" in jar)
    self.assertFalse("baz" in jar)
    self.assertTrue("baz/qux" in jar)

    self.assertTrue(jar["bar"], files[1])
    self.assertTrue(jar["foo"], files[0])
    self.assertTrue(jar["baz/qux"], files[2])

    s.seek(0)
    jar = JarReader(fileobj=s)
    self.assertTrue("bar" in jar)
    self.assertTrue("foo" in jar)
    self.assertFalse("baz" in jar)
    self.assertTrue("baz/qux" in jar)

    files[0].seek(0)
    self.assertEqual(jar["bar"].filename, files[0].filename)
    self.assertEqual(jar["bar"].compressed, files[0].compressed)
    self.assertEqual(jar["bar"].read(), files[0].read())

    files[1].seek(0)
    self.assertEqual(jar["foo"].filename, files[1].filename)
    self.assertEqual(jar["foo"].compressed, files[1].compressed)
    self.assertEqual(jar["foo"].read(), files[1].read())

    files[2].seek(0)
    self.assertEqual(jar["baz/qux"].filename, files[2].filename)
    self.assertEqual(jar["baz/qux"].compressed, files[2].compressed)
    self.assertEqual(jar["baz/qux"].read(), files[2].read())
def test_preload(self):
    s = MockDest()
    with JarWriter(fileobj=s) as jar:
        jar.add("foo", b"foo")
        jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
        jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")

    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, None)

    with JarWriter(fileobj=s) as jar:
        jar.add("foo", b"foo")
        jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
        jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
        jar.preload(["baz/qux", "bar"])

    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, "bar")
    files = [j for j in jar]

    self.assertEqual(files[0].filename, "baz/qux")
    self.assertEqual(files[1].filename, "bar")
    self.assertEqual(files[2].filename, "foo")
def precompile_cache(registry, source_path, gre_path, app_path):
    """
    Create startup cache for the given application directory, using the
    given GRE path.
    - registry is a FileRegistry-like instance to which the startup cache
      is added.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache entries for all resources under resource://app/ are
    generated, except when gre_path == app_path, in which case they are
    under resource://gre/.
    """
    from tempfile import mkstemp

    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = "app"
    else:
        resource = "gre"
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    fd, cache = mkstemp(".zip")
    os.close(fd)
    os.remove(cache)

    try:
        extra_env = {"MOZ_STARTUP_CACHE": cache}
        if buildconfig.substs.get("MOZ_TSAN"):
            extra_env["TSAN_OPTIONS"] = "report_bugs=0"
        if buildconfig.substs.get("MOZ_ASAN"):
            extra_env["ASAN_OPTIONS"] = "detect_leaks=0"
        if launcher.launch(
            [
                "xpcshell",
                "-g",
                gre_path,
                "-a",
                app_path,
                "-f",
                os.path.join(os.path.dirname(__file__), "precompile_cache.js"),
                "-e",
                'precompile_startupcache("resource://%s/");' % resource,
            ],
            extra_linker_path=gre_path,
            extra_env=extra_env,
        ):
            errors.fatal("Error while running startup cache precompilation")
            return
        from mozpack.mozjar import JarReader

        jar = JarReader(cache)
        resource = "/resource/%s/" % resource
        for f in jar:
            if resource in f.filename:
                path = f.filename[f.filename.index(resource) + len(resource):]
                if registry.contains(path):
                    registry.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        if os.path.exists(cache):
            os.remove(cache)