def package_geckolibs_aar(topsrcdir, distdir, output_file):
    """Assemble the geckolibs .aar (Android ARchive) at output_file.

    Bundles the AndroidManifest.xml and classes.jar stub from the source
    tree, the JNI .so libraries and asset .so libraries from the Fennec
    distribution directory, and the build-info JSON files as assets.
    """
    jarrer = Jarrer(optimize=False)

    srcdir = os.path.join(
        topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckolibs')
    jarrer.add('AndroidManifest.xml',
               File(os.path.join(srcdir, 'AndroidManifest.xml')))
    jarrer.add('classes.jar', File(os.path.join(srcdir, 'classes.jar')))

    # Native libraries live under jni/ in an .aar.
    jni_finder = FileFinder(os.path.join(distdir, 'fennec', 'lib'))
    for relpath, entry in jni_finder.find('**/*.so'):
        jarrer.add(os.path.join('jni', relpath), entry)

    # Include the buildinfo JSON as an asset, to give future consumers at
    # least a hope of determining where this AAR came from.
    # (Renamed from `json` to avoid shadowing the stdlib module name.)
    json_finder = FileFinder(distdir, ignore=['*.mozinfo.json'])
    for relpath, entry in json_finder.find('*.json'):
        jarrer.add(os.path.join('assets', relpath), entry)

    # This neatly ignores omni.ja.
    asset_finder = FileFinder(os.path.join(distdir, 'fennec', 'assets'))
    for relpath, entry in asset_finder.find('**/*.so'):
        jarrer.add(os.path.join('assets', relpath), entry)

    jarrer.copy(output_file)
    return 0
def test_file(self):
    '''
    Verify File.copy writes the expected content to the destination in
    each situation that exercises a distinct code path:
    - different content
    - different content of the same size
    - same content
    - long content
    '''
    src = self.tmppath('src')
    dest = self.tmppath('dest')
    for content in samples:
        with open(src, 'wb') as tmp:
            tmp.write(content)
        # Backdate any pre-existing destination so it is older than the
        # source and the copy is not skipped.
        if os.path.exists(dest):
            stamp = os.path.getmtime(src) - 1
            os.utime(dest, (stamp, stamp))
        f = File(src)
        f.copy(dest)
        self.assertEqual(content, open(dest, 'rb').read())
        # open() must yield a reset file object, twice in a row.
        self.assertEqual(content, f.open().read())
        self.assertEqual(content, f.open().read())
def test_file(self):
    """
    Verify that File.copy produces the proper destination content in
    every situation that triggers a different code path:
    - different content
    - different content of the same size
    - same content
    - long content
    """
    src = self.tmppath("src")
    dest = self.tmppath("dest")
    for content in samples:
        with open(src, "wb") as tmp:
            tmp.write(content)
        # Backdate any existing destination so it is older than the source
        # and the copy is not skipped.
        if os.path.exists(dest):
            older = os.path.getmtime(src) - 1
            os.utime(dest, (older, older))
        copied = File(src)
        copied.copy(dest)
        self.assertEqual(content, open(dest, "rb").read())
        # open() must hand back a reset file object, twice in a row.
        self.assertEqual(content, copied.open().read())
        self.assertEqual(content, copied.open().read())
def package_geckoview_aar(topsrcdir, distdir, output_file):
    """Assemble the geckoview .aar (Android ARchive) at output_file.

    Packages omni.ja, the geckoview resource set, a merged classes.jar,
    R.txt and AndroidManifest.xml from a completed Fennec build.
    """
    jarrer = Jarrer(optimize=False)
    fennec_path = os.path.join(distdir, 'fennec')
    assets = FileFinder(os.path.join(fennec_path, 'assets'), ignore=['*.so'])
    for p, f in assets.find('omni.ja'):
        jarrer.add(os.path.join('assets', p), f)

    # The folder that contains Fennec's JAR files and resources.
    base_path = os.path.join(distdir, '..', 'mobile', 'android', 'base')

    # The resource set is packaged during Fennec's build.
    # BUG FIX: the original passed `p` — the stale loop variable leaked from
    # the assets loop above — as JarFinder's base path.  Use the zip's own
    # path, which JarFinder uses to label the entries it yields.
    resources_path = os.path.join(base_path, 'geckoview_resources.zip')
    resjar = JarReader(resources_path)
    for p, f in JarFinder(resources_path, resjar).find('*'):
        jarrer.add(os.path.join('res', p), f)

    # Package the contents of all Fennec JAR files into classes.jar.
    classes_jar_file = _generate_geckoview_classes_jar(distdir, base_path)
    jarrer.add('classes.jar', classes_jar_file)

    # Add R.txt.
    jarrer.add('R.txt', File(os.path.join(base_path, 'R.txt')))

    # Finally add AndroidManifest.xml.
    srcdir = os.path.join(
        topsrcdir, 'mobile', 'android', 'geckoview_library', 'geckoview')
    jarrer.add('AndroidManifest.xml',
               File(os.path.join(srcdir, 'AndroidManifest.xml')))

    jarrer.copy(output_file)
    return 0
def test_file_open(self):
    '''
    Ensure File.open returns a file object positioned back at the start.
    '''
    src = self.tmppath('src')
    content = ''.join(samples)
    with open(src, 'wb') as tmp:
        tmp.write(content)
    f = File(src)
    # A partial read followed by a full read: both must begin at offset 0.
    self.assertEqual(content[:42], f.open().read(42))
    self.assertEqual(content, f.open().read())
def test_file_open(self):
    """
    Test whether File.open returns an appropriately reset file object.
    """
    src = self.tmppath("src")
    # BUG FIX: join as bytes.  The file is opened in binary mode ("wb") and
    # the sample contents are bytes (see the sibling test using b"".join),
    # so "".join(samples) would raise TypeError on Python 3.
    content = b"".join(samples)
    with open(src, "wb") as tmp:
        tmp.write(content)
    f = File(src)
    # A partial read then a full read: both must start at offset 0.
    self.assertEqual(content[:42], f.open().read(42))
    self.assertEqual(content, f.open().read())
def test_file_open(self):
    """
    Ensure File.open hands back a file object reset to the beginning.
    """
    src = self.tmppath("src")
    content = b"".join(samples)
    with open(src, "wb") as fh:
        fh.write(content)
    f = File(src)
    # Partial read, then a full read: each open() must start at offset 0.
    self.assertEqual(content[:42], f.open().read(42))
    self.assertEqual(content, f.open().read())
def test_file_dest(self):
    '''
    Like test_file, but copy into a destination object instead of a
    destination path.  This checks that File.copy drives the Dest
    interface correctly, so other Dest subclasses keep working.
    '''
    src = self.tmppath('src')
    dest = MockDest()
    for content in samples:
        with open(src, 'wb') as tmp:
            tmp.write(content)
        File(src).copy(dest)
        self.assertEqual(content, dest.getvalue())
def test_file_dest(self):
    """
    Like test_file, but copy into a destination object instead of a
    destination path.  This verifies File.copy uses the Dest interface
    properly, so other Dest subclasses keep working.
    """
    src = self.tmppath("src")
    dest = MockDest()
    for content in samples:
        with open(src, "wb") as tmp:
            tmp.write(content)
        File(src).copy(dest)
        self.assertEqual(content, dest.getvalue())
def test_minified_properties(self):
    """MinifiedProperties drops comment lines but keeps blank lines."""
    source = "\n".join(
        [
            "# Comments are removed",
            "foo = bar",
            "",
            "# Another comment",
        ]
    )

    # In-memory file: minified stream yields bytes lines.
    generated = GeneratedFile(source)
    self.assertEqual(
        MinifiedProperties(generated).open().readlines(), [b"foo = bar\n", b"\n"]
    )

    # On-disk file, minified through copy().
    prop_path = self.tmppath("prop")
    open(prop_path, "w").write(source)
    MinifiedProperties(File(prop_path)).copy(self.tmppath("prop2"))
    self.assertEqual(open(self.tmppath("prop2")).readlines(), ["foo = bar\n", "\n"])
def test_minified_properties(self):
    '''MinifiedProperties drops comment lines but keeps blank lines.'''
    source = '\n'.join([
        '# Comments are removed',
        'foo = bar',
        '',
        '# Another comment',
    ])

    # In-memory file.
    generated = GeneratedFile(source)
    self.assertEqual(MinifiedProperties(generated).open().readlines(),
                     ['foo = bar\n', '\n'])

    # On-disk file, minified through copy().
    prop_path = self.tmppath('prop')
    open(prop_path, 'wb').write(source)
    MinifiedProperties(File(prop_path)).copy(self.tmppath('prop2'))
    self.assertEqual(open(self.tmppath('prop2')).readlines(),
                     ['foo = bar\n', '\n'])
def _generate_geckoview_classes_jar(distdir, base_path):
    """Merge every JAR under base_path (except gecko-R.jar) into a single
    classes.jar in distdir, and return it as a File."""
    base_folder = FileFinder(base_path, ignore=['gecko-R.jar'])

    # Unzip all jar files into $(DISTDIR)/geckoview_aar_classes.
    classes_dir = os.path.join(distdir, 'geckoview_aar_classes')
    shutil.rmtree(classes_dir, ignore_errors=True)
    util.ensureParentDir(classes_dir)
    for _, entry in base_folder.find('*.jar'):
        with zipfile.ZipFile(entry.path) as zf:
            zf.extractall(classes_dir)

    # Rezip them into a single classes.jar file.
    classes_jar_path = os.path.join(distdir, 'classes.jar')
    _zipdir(classes_dir, classes_jar_path)
    return File(classes_jar_path)
def test_file_no_write(self):
    '''
    Exercise the conditions under which File.copy must not write to the
    destination file.
    '''
    src = self.tmppath('src')
    dest = self.tmppath('dest')
    with open(src, 'wb') as fh:
        fh.write('test')

    # Initial copy.
    f = File(src)
    f.copy(dest)

    # Copying again must not trigger a write.
    f.copy(DestNoWrite(dest))
    self.assertEqual('test', open(dest, 'rb').read())

    # A newer source with the same content: still no write.
    stamp = os.path.getmtime(src) - 1
    os.utime(dest, (stamp, stamp))
    f.copy(DestNoWrite(dest))
    self.assertEqual('test', open(dest, 'rb').read())

    # An older source, even with different content: no write either.
    with open(src, 'wb') as fh:
        fh.write('fooo')
    stamp = os.path.getmtime(dest) - 1
    os.utime(src, (stamp, stamp))
    f.copy(DestNoWrite(dest))
    self.assertEqual('test', open(dest, 'rb').read())

    # Sanity check: in a situation where a copy does occur, DestNoWrite
    # raises.
    stamp = os.path.getmtime(src) - 1
    os.utime(dest, (stamp, stamp))
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
def prepare_zip_from(archive, tmpdir):
    """Stream a zstd-compressed tar archive and stage its entries into the
    module-level ``jar``, recompressing debug files along the way.

    :param archive: an http(s) URL or a local file path to a .tar.zst.
    :param tmpdir: scratch directory for recompressed intermediates.

    NOTE(review): ``jar`` and ``log`` are not parameters — they appear to be
    module-level objects defined elsewhere in this file; confirm before
    reusing this function standalone.
    """
    if archive.startswith('http'):
        resp = requests.get(archive, allow_redirects=True, stream=True)
        resp.raise_for_status()
        reader = resp.raw
        # Work around taskcluster generic-worker possibly gzipping the tar.zst.
        if resp.headers.get('Content-Encoding') == 'gzip':
            reader = gzip.GzipFile(fileobj=reader)
    else:
        reader = open(archive, 'rb')
    ctx = zstandard.ZstdDecompressor()
    uncompressed = ctx.stream_reader(reader)
    # mode='r|' processes the tar as a forward-only (non-seekable) stream.
    with tarfile.open(mode='r|', fileobj=uncompressed,
                      bufsize=1024 * 1024) as tar:
        while True:
            info = tar.next()
            if info is None:
                break
            log.info(info.name)
            data = tar.extractfile(info)
            path = os.path.join(tmpdir, info.name.lstrip('/'))
            if info.name.endswith('.dbg'):
                # .dbg entries: gzip to disk, then store (not deflate) the
                # .gz in the jar.
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as fh:
                    with gzip.GzipFile(fileobj=fh, mode='wb',
                                       compresslevel=5) as c:
                        shutil.copyfileobj(data, c)
                jar.add(info.name + '.gz', File(path), compress=False)
            elif info.name.endswith('.dSYM.tar'):
                # .dSYM.tar entries: bz2-compress in 16 KiB chunks.
                import bz2
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as fh:
                    c = bz2.BZ2Compressor()
                    while True:
                        buf = data.read(16384)
                        if not buf:
                            break
                        fh.write(c.compress(buf))
                    fh.write(c.flush())
                jar.add(info.name + '.bz2', File(path), compress=False)
            elif info.name.endswith('.pdb'):
                # .pdb entries: compress with makecab into a trailing-
                # underscore cabinet (foo.pdb -> foo.pd_).
                import subprocess
                makecab = os.environ.get('MAKECAB', 'makecab')
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as fh:
                    shutil.copyfileobj(data, fh)
                subprocess.check_call([
                    makecab, '-D', 'CompressionType=MSZIP', path, path + '_'
                ], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
                jar.add(info.name[:-1] + '_', File(path + '_'), compress=False)
            else:
                # Everything else goes in verbatim, straight from the tar.
                jar.add(info.name, data)
    reader.close()
def test_file_no_write(self):
    """
    Test various conditions where File.copy is expected not to write in the
    destination file.
    """
    src = self.tmppath("src")
    dest = self.tmppath("dest")
    # BUG FIX: write and compare bytes, not str.  The files are opened in
    # binary mode and read back with "rb"; writing "test" to a "wb" file
    # raises TypeError on Python 3.
    with open(src, "wb") as tmp:
        tmp.write(b"test")
    # Initial copy
    f = File(src)
    f.copy(dest)
    # Ensure subsequent copies won't trigger writes
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", open(dest, "rb").read())
    # When the source file is newer, but with the same content, no copy
    # should occur
    time = os.path.getmtime(src) - 1
    os.utime(dest, (time, time))
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", open(dest, "rb").read())
    # When the source file is older than the destination file, even with
    # different content, no copy should occur.
    with open(src, "wb") as tmp:
        tmp.write(b"fooo")
    time = os.path.getmtime(dest) - 1
    os.utime(src, (time, time))
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", open(dest, "rb").read())
    # Double check that under conditions where a copy occurs, we would get
    # an exception.
    time = os.path.getmtime(src) - 1
    os.utime(dest, (time, time))
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
    # skip_if_older=False is expected to force a copy in this situation.
    f.copy(dest, skip_if_older=False)
    self.assertEqual(b"fooo", open(dest, "rb").read())
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None, lib_dirs=[], assets_dirs=[], szip_assets_libs_with=None, root_files=[], verbose=False): jarrer = Jarrer(optimize=False) # First, take input files. The contents of the later files overwrites the # content of earlier files. for input in inputs: jar = JarReader(input) for file in jar: path = file.filename if jarrer.contains(path): jarrer.remove(path) jarrer.add(path, DeflatedFile(file), compress=file.compressed) def add(path, file, compress=None): abspath = os.path.abspath(file.path) if verbose: print('Packaging %s from %s' % (path, file.path)) if not os.path.exists(abspath): raise ValueError('File %s not found (looked for %s)' % \ (file.path, abspath)) if jarrer.contains(path): jarrer.remove(path) jarrer.add(path, file, compress=compress) for assets_dir in assets_dirs: finder = FileFinder(assets_dir, find_executables=False) for p, f in finder.find('**'): compress = None # Take default from Jarrer. if p.endswith('.so'): # Asset libraries are special. if szip_assets_libs_with: # We need to szip libraries before packing. The file # returned by the finder is not yet opened. When it is # opened, it will "see" the content updated by szip. subprocess.check_output( [szip_assets_libs_with, mozpath.join(finder.base, p)]) if f.open().read(4) == 'SeZz': # We need to store (rather than deflate) szipped libraries # (even if we don't szip them ourselves). compress = False add(mozpath.join('assets', p), f, compress=compress) for lib_dir in lib_dirs: finder = FileFinder(lib_dir, find_executables=False) for p, f in finder.find('**'): add(mozpath.join('lib', p), f) for root_file in root_files: add(os.path.basename(root_file), File(root_file)) if omni_ja: add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False) if classes_dex: add('classes.dex', File(classes_dex)) return jarrer
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None, lib_dirs=[],
                       assets_dirs=[], features_dirs=[], root_files=[],
                       verbose=False):
    """Build and return a Jarrer populated with the contents of a Fennec APK.

    :param inputs: existing APK/zip files whose entries seed the jar; later
        files overwrite earlier ones, except DEX files (see below).
    :param omni_ja: optional omni.ja file, stored uncompressed under assets/.
    :param classes_dex: optional classes.dex; rejected for Gradle builds.
    :param lib_dirs: directories whose contents go under lib/.
    :param assets_dirs: directories whose contents go under assets/; .so
        files are xz-compressed in place when an XZ tool is configured.
    :param features_dirs: directories whose contents go under
        assets/features/, stored uncompressed.
    :param root_files: files added at the APK root by basename.
    :param verbose: print each packaged path.
    """
    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrites the
    # content of earlier files. Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input. This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if mozpath.match(path, '/classes*.dex'):
                last_input_with_dex_files = input
                continue
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename
            if not mozpath.match(path, '/classes*.dex'):
                continue
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Replace-or-add a single entry, validating the source file exists.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' % \
                (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if f.open().read(5)[1:] == '7zXZ':
                    # Bytes 1-4 of an xz file are the '7zXZ' magic.
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed
                    # libraries (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [
                        buildconfig.substs.get('XZ'), '-zkf',
                        mozpath.join(finder.base, p)
                    ]

                    # For now, the mozglue XZStream ELF loader can only support xz files
                    # with a single stream that contains a single block. In xz, there is no
                    # explicit option to set the max block count. Instead, we force xz to use
                    # single thread mode, which results in a single block.
                    cmd.extend(['--threads=1'])

                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'

                    if bcj:
                        cmd.extend([bcj])
                    # We need to explicitly specify the LZMA filter chain to ensure consistent
                    # builds across platforms. Note that the dict size must be less than 16MiB
                    # per the hardcoded value in mozglue/linker/XZStream.cpp. This is the
                    # default LZMA filter chain for xz-utils version 5.0. See:
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/lzma/lzma_encoder_presets.c
                    # https://github.com/xz-mirror/xz/blob/v5.0.0/src/liblzma/api/lzma/container.h#L31
                    cmd.extend([
                        '--lzma2=dict=8MiB,lc=3,lp=0,pb=2,mode=normal,nice=64,mf=bt4,depth=0'
                    ])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    os.rename(f.path + '.xz', f.path)
                    compress = False
            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)

    if classes_dex:
        if buildconfig.substs.get('MOZ_BUILD_MOBILE_ANDROID_WITH_GRADLE'):
            raise ValueError("Fennec APKs built --with-gradle "
                             "should never specify classes.dex")
        add('classes.dex', File(classes_dex))

    return jarrer
def main():
    """Command-line entry point: package a staged source directory into the
    requested chrome format (omni, jar or flat) at the destination,
    honoring defines, removals, minification, PDB copying and jar-log
    preloading options."""
    parser = ArgumentParser()
    parser.add_argument(
        "-D",
        dest="defines",
        action="append",
        metavar="VAR[=VAL]",
        help="Define a variable",
    )
    parser.add_argument(
        "--format",
        default="omni",
        help="Choose the chrome format for packaging "
        + "(omni, jar or flat ; default: %(default)s)",
    )
    parser.add_argument("--removals", default=None, help="removed-files source file")
    parser.add_argument(
        "--ignore-errors",
        action="store_true",
        default=False,
        help="Transform errors into warnings.",
    )
    parser.add_argument(
        "--ignore-broken-symlinks",
        action="store_true",
        default=False,
        help="Do not fail when processing broken symlinks.",
    )
    parser.add_argument(
        "--minify",
        action="store_true",
        default=False,
        help="Make some files more compact while packaging",
    )
    parser.add_argument(
        "--minify-js",
        action="store_true",
        help="Minify JavaScript files while packaging.",
    )
    parser.add_argument(
        "--js-binary",
        help="Path to js binary. This is used to verify "
        "minified JavaScript. If this is not defined, "
        "minification verification will not be performed.",
    )
    parser.add_argument("--jarlog", default="", help="File containing jar " + "access logs")
    parser.add_argument(
        "--compress",
        choices=("none", "deflate"),
        default="deflate",
        help="Use given jar compression (default: deflate)",
    )
    parser.add_argument("manifest", default=None, nargs="?", help="Manifest file name")
    parser.add_argument("source", help="Source directory")
    parser.add_argument("destination", help="Destination directory")
    parser.add_argument(
        "--non-resource",
        nargs="+",
        metavar="PATTERN",
        default=[],
        help="Extra files not to be considered as resources",
    )
    args = parser.parse_args()

    defines = dict(buildconfig.defines["ALLDEFINES"])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Map the CLI choice to the compress flag Jarrer-style formatters take.
    compress = {
        "none": False,
        "deflate": True,
    }[args.compress]

    copier = FileCopier()
    if args.format == "flat":
        formatter = FlatFormatter(copier)
    elif args.format == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == "omni":
        formatter = OmniJarFormatter(
            copier,
            buildconfig.substs["OMNIJAR_NAME"],
            compress=compress,
            non_resources=args.non_resource,
        )
    else:
        errors.fatal("Unknown format: %s" % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines["MOZ_OMNIJAR"] = 1
    elif "MOZ_OMNIJAR" in defines:
        del defines["MOZ_OMNIJAR"]

    respath = ""
    if "RESPATH" in defines:
        respath = SimpleManifestSink.normalize_path(defines["RESPATH"])
    # Strip any leading slashes so respath is always relative.
    while respath.startswith("/"):
        respath = respath[1:]

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args["minify_js_verify_command"] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)), "js-compare-ast.js"),
            ]
        finder = PackagerFileFinder(args.source, find_executables=True, **finder_args)
        if "NO_PKG_FILES" in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter, args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(""), "bin/*")
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, "removed-files"), removals)

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get("MOZ_COPY_PDBS"):
        # We want to mutate the copier while we're iterating through it, so copy
        # the items to a list first.
        copier_items = [(p, f) for p, f in copier]
        for p, f in copier_items:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + ".pdb"
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception("Cannot find jar log: %s" % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [
                mozpath.join(base, buildconfig.substs["OMNIJAR_NAME"])
                for base in sink.packager.get_bases(addons=False)
            ]

        from mozpack.mozjar import JarLog

        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                raise Exception("No jar log data for %s" % p)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination, respath)))
def main():
    """Command-line entry point: package a staged source directory into the
    requested chrome format (omni, jar or flat) at the destination,
    honoring defines, removals, minification, shlibsign, PDB copying and
    jar-log preloading options."""
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--ignore-broken-symlinks', action='store_true',
                        default=False,
                        help='Do not fail when processing broken symlinks.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--compress', choices=('none', 'deflate', 'brotli'),
                        default='deflate',
                        help='Use given jar compression (default: deflate)')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines['ALLDEFINES'])
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Map the CLI choice to the compress flag Jarrer-style formatters take.
    compress = {
        'none': False,
        'deflate': True,
        'brotli': JAR_BROTLI,
    }[args.compress]

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=compress)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=compress,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    # Strip any leading slashes so respath is always relative.
    while respath.startswith('/'):
        respath = respath[1:]

    # Point the shlibsign launcher at the just-built tools when not
    # cross-compiling.
    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
            ignore_broken_symlinks=args.ignore_broken_symlinks,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True,
                            **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # If a pdb file is present and we were instructed to copy it, include it.
    # Run on all OSes to capture MinGW builds
    if buildconfig.substs.get('MOZ_COPY_PDBS'):
        for p, f in copier:
            if isinstance(f, ExecutableFile):
                pdbname = os.path.splitext(f.inputs()[0])[0] + '.pdb'
                if os.path.exists(pdbname):
                    copier.add(os.path.basename(pdbname), File(pdbname))

    # Setup preloading
    if args.jarlog:
        if not os.path.exists(args.jarlog):
            raise Exception('Cannot find jar log: %s' % args.jarlog)
        omnijars = []
        if isinstance(formatter, OmniJarFormatter):
            omnijars = [mozpath.join(base,
                                     buildconfig.substs['OMNIJAR_NAME'])
                        for base in sink.packager.get_bases(addons=False)]

        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            if respath:
                p = mozpath.relpath(p, respath)
            if p in log:
                f.preload(log[p])
            elif p in omnijars:
                raise Exception('No jar log data for %s' % p)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       respath)))
def package_fennec_apk(inputs=[], omni_ja=None, classes_dex=None, lib_dirs=[],
                       assets_dirs=[], features_dirs=[], root_files=[],
                       verbose=False):
    """Build and return a Jarrer populated with the contents of a Fennec APK.

    :param inputs: existing APK/zip files whose entries seed the jar; later
        files overwrite earlier ones on path conflicts.
    :param omni_ja: optional omni.ja file, stored uncompressed under assets/.
    :param classes_dex: optional classes.dex to add at the root.
    :param lib_dirs: directories whose contents go under lib/.
    :param assets_dirs: directories whose contents go under assets/; .so
        files are xz-compressed in place when an XZ tool is configured.
    :param features_dirs: directories whose contents go under
        assets/features/, stored uncompressed.
    :param root_files: files added at the APK root by basename.
    :param verbose: print each packaged path.
    """
    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrites the
    # content of earlier files.
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Replace-or-add a single entry, validating the source file exists.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' % \
                (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir, find_executables=False)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if f.open().read(5)[1:] == '7zXZ':
                    # Bytes 1-4 of an xz file are the '7zXZ' magic.
                    print('%s is already compressed' % p)
                    # We need to store (rather than deflate) compressed
                    # libraries (even if we don't compress them ourselves).
                    compress = False
                elif buildconfig.substs.get('XZ'):
                    cmd = [
                        buildconfig.substs.get('XZ'), '-zkf',
                        mozpath.join(finder.base, p)
                    ]
                    bcj = None
                    if buildconfig.substs.get('MOZ_THUMB2'):
                        bcj = '--armthumb'
                    elif buildconfig.substs.get('CPU_ARCH') == 'arm':
                        bcj = '--arm'
                    elif buildconfig.substs.get('CPU_ARCH') == 'x86':
                        bcj = '--x86'
                    if bcj:
                        cmd.extend([bcj, '--lzma2'])
                    print('xz-compressing %s with %s' % (p, ' '.join(cmd)))
                    subprocess.check_output(cmd)
                    # xz -k leaves foo.so.xz next to foo.so; swap it in.
                    os.rename(f.path + '.xz', f.path)
                    compress = False
            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)

    if classes_dex:
        add('classes.dex', File(classes_dex))

    return jarrer
def package_fennec_apk(inputs=[], omni_ja=None, lib_dirs=[], assets_dirs=[],
                       features_dirs=[], root_files=[], verbose=False):
    """Build and return a Jarrer populated with the contents of a Fennec APK.

    :param inputs: existing APK/zip files whose entries seed the jar; later
        files overwrite earlier ones, except DEX files (see below).
    :param omni_ja: optional omni.ja file, stored uncompressed under assets/.
    :param lib_dirs: directories whose contents go under lib/.
    :param assets_dirs: directories whose contents go under assets/.
    :param features_dirs: directories whose contents go under
        assets/features/, stored uncompressed.
    :param root_files: files added at the APK root by basename.
    :param verbose: print each packaged path.
    """
    jarrer = Jarrer()

    # First, take input files. The contents of the later files overwrites the
    # content of earlier files. Multidexing requires special care: we want a
    # coherent set of classesN.dex files, so we only take DEX files from a
    # single input. This avoids taking, say, classes{1,2,3}.dex from the first
    # input and only classes{1,2}.dex from the second input, leading to
    # (potentially) duplicated symbols at runtime.
    last_input_with_dex_files = None
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if mozpath.match(path, '/classes*.dex'):
                last_input_with_dex_files = input
                continue
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    # If we have an input with DEX files, take them all here.
    if last_input_with_dex_files:
        jar = JarReader(last_input_with_dex_files)
        for file in jar:
            path = file.filename
            if not mozpath.match(path, '/classes*.dex'):
                continue
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Replace-or-add a single entry, validating the source file exists.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)'
                             % (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', p), f)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja), compress=False)

    return jarrer
def test_file_no_write(self):
    """
    Exercise the conditions under which File.copy must NOT write to the
    destination file.
    """
    src = self.tmppath("src")
    dest = self.tmppath("dest")

    def read_dest():
        return open(dest, "rb").read()

    with open(src, "wb") as fh:
        fh.write(b"test")

    # Initial copy.
    f = File(src)
    f.copy(dest)

    # Copying again must not trigger a write.
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", read_dest())

    # A newer source with the same content: still no write.
    stamp = os.path.getmtime(src) - 1
    os.utime(dest, (stamp, stamp))
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", read_dest())

    # An older source, even with different content: no write either.
    with open(src, "wb") as fh:
        fh.write(b"fooo")
    stamp = os.path.getmtime(dest) - 1
    os.utime(src, (stamp, stamp))
    f.copy(DestNoWrite(dest))
    self.assertEqual(b"test", read_dest())

    # Sanity check: when a copy would occur, DestNoWrite raises.
    stamp = os.path.getmtime(src) - 1
    os.utime(dest, (stamp, stamp))
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))

    # skip_if_older=False forces the copy in that same situation.
    f.copy(dest, skip_if_older=False)
    self.assertEqual(b"fooo", read_dest())
def prepare_zip_from(archive, tmpdir):
    """Stream a zstd-compressed tar archive and stage its entries into the
    module-level ``jar``, recompressing debug files along the way.

    :param archive: an http(s) URL or a local file path to a .tar.zst.
    :param tmpdir: scratch directory for recompressed intermediates.

    NOTE(review): ``jar`` and ``log`` are not parameters — they appear to be
    module-level objects defined elsewhere in this file; confirm before
    reusing this function standalone.
    """
    if archive.startswith("http"):
        resp = requests.get(archive, allow_redirects=True, stream=True)
        resp.raise_for_status()
        reader = resp.raw
        # Work around taskcluster generic-worker possibly gzipping the tar.zst.
        if resp.headers.get("Content-Encoding") == "gzip":
            reader = gzip.GzipFile(fileobj=reader)
    else:
        reader = open(archive, "rb")
    ctx = zstandard.ZstdDecompressor()
    uncompressed = ctx.stream_reader(reader)
    # mode="r|" processes the tar as a forward-only (non-seekable) stream.
    with tarfile.open(
        mode="r|", fileobj=uncompressed, bufsize=1024 * 1024
    ) as tar:
        while True:
            info = tar.next()
            if info is None:
                break
            log.info(info.name)
            data = tar.extractfile(info)
            path = os.path.join(tmpdir, info.name.lstrip("/"))
            if info.name.endswith(".dbg"):
                # .dbg entries: gzip to disk, then store (not deflate) the
                # .gz in the jar.
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, "wb") as fh:
                    with gzip.GzipFile(
                        fileobj=fh, mode="wb", compresslevel=5
                    ) as c:
                        shutil.copyfileobj(data, c)
                jar.add(info.name + ".gz", File(path), compress=False)
            elif info.name.endswith(".dSYM.tar"):
                # .dSYM.tar entries: bz2-compress in 16 KiB chunks.
                import bz2

                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, "wb") as fh:
                    c = bz2.BZ2Compressor()
                    while True:
                        buf = data.read(16384)
                        if not buf:
                            break
                        fh.write(c.compress(buf))
                    fh.write(c.flush())
                jar.add(info.name + ".bz2", File(path), compress=False)
            elif info.name.endswith((".pdb", ".exe", ".dll")):
                # Windows symbols/binaries: compress with makecab into a
                # trailing-underscore cabinet (foo.pdb -> foo.pd_).
                import subprocess

                makecab = os.environ.get("MAKECAB", "makecab")
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, "wb") as fh:
                    shutil.copyfileobj(data, fh)

                subprocess.check_call(
                    [makecab, "-D", "CompressionType=MSZIP", path, path + "_"],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.STDOUT,
                )

                jar.add(info.name[:-1] + "_", File(path + "_"), compress=False)
            else:
                # Everything else goes in verbatim, straight from the tar.
                jar.add(info.name, data)
    reader.close()