def test_rejar(self):
    """Copying entries from one jar into another must preserve each
    entry's name, content, and resulting compression state."""
    original = MockDest()
    with JarWriter(fileobj=original) as writer:
        writer.add("foo", b"foo")
        writer.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
        # Third positional argument disables compression for this entry.
        writer.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)

    # Re-jar: feed every entry of the first jar straight into a new one.
    rejarred = MockDest()
    with JarWriter(fileobj=rejarred) as writer:
        for entry in JarReader(fileobj=original):
            writer.add(entry.filename, entry)

    entries = list(JarReader(fileobj=rejarred))

    expectations = [
        ("foo", False, b"foo"),
        ("bar", True, b"aaaaaaaaaaaaanopqrstuvwxyz"),
        ("baz/qux", True, b"aaaaaaaaaaaaanopqrstuvwxyz"),
    ]
    for entry, (name, deflated, data) in zip(entries, expectations):
        self.assertEqual(entry.filename, name)
        self.assertEqual(bool(entry.compressed), deflated)
        self.assertEqual(entry.read(), data)
def test_rejar(self):
    """Entries copied from one jar into another keep their name, content
    and compression state.

    Uses bytes literals throughout: on Python 3 jar entries hold bytes and
    read() returns bytes, so the original str payloads and expectations
    would never compare equal.  b'' literals are plain str on Python 2,
    so this remains backward-compatible.
    """
    s = MockDest()
    with JarWriter(fileobj=s) as jar:
        jar.add('foo', b'foo')
        jar.add('bar', b'aaaaaaaaaaaaanopqrstuvwxyz')
        # Explicitly uncompressed entry.
        jar.add('baz/qux', b'aaaaaaaaaaaaanopqrstuvwxyz', False)
    new = MockDest()
    with JarWriter(fileobj=new) as jar:
        for j in JarReader(fileobj=s):
            jar.add(j.filename, j)
    jar = JarReader(fileobj=new)
    files = [j for j in jar]
    self.assertEqual(files[0].filename, 'foo')
    self.assertFalse(files[0].compressed)
    self.assertEqual(files[0].read(), b'foo')
    self.assertEqual(files[1].filename, 'bar')
    self.assertTrue(files[1].compressed)
    self.assertEqual(files[1].read(), b'aaaaaaaaaaaaanopqrstuvwxyz')
    self.assertEqual(files[2].filename, 'baz/qux')
    self.assertTrue(files[2].compressed)
    self.assertEqual(files[2].read(), b'aaaaaaaaaaaaanopqrstuvwxyz')
def test_jar_finder(self):
    """Run the shared match tests against a JarFinder over a fresh jar."""
    self.jar = JarWriter(file=self.tmppath('test.jar'))
    self.prepare_match_test()
    self.jar.finish()
    reader = JarReader(file=self.tmppath('test.jar'))
    self.finder = JarFinder(self.tmppath('test.jar'), reader)
    self.do_match_test()
    # Consistent with the other TestJarFinder variants: a miss returns
    # None and a hit is exposed as a DeflatedFile.
    self.assertIsNone(self.finder.get('does-not-exist'))
    self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
def test_jar_finder(self):
    """A JarFinder over a freshly written jar must satisfy the shared
    match tests, return None for unknown paths, and expose hits as
    DeflatedFile instances."""
    jar_path = self.tmppath("test.jar")
    self.jar = JarWriter(file=jar_path)
    self.prepare_match_test()
    self.jar.finish()
    self.finder = JarFinder(jar_path, JarReader(file=jar_path))
    self.do_match_test()
    self.assertIsNone(self.finder.get("does-not-exist"))
    self.assertIsInstance(self.finder.get("bar"), DeflatedFile)
class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
    """Exercise JarFinder through the shared MatchTestTemplate suite."""

    def add(self, path):
        # Each entry's content is simply its own name, compressed.
        self.jar.add(path, path, compress=True)

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_jar_finder(self):
        self.jar = JarWriter(file=self.tmppath('test.jar'))
        self.prepare_match_test()
        self.jar.finish()
        reader = JarReader(file=self.tmppath('test.jar'))
        self.finder = JarFinder(self.tmppath('test.jar'), reader)
        self.do_match_test()
        # Also cover the direct get() API, as the other TestJarFinder
        # variants do: misses yield None, hits are DeflatedFile instances.
        self.assertIsNone(self.finder.get('does-not-exist'))
        self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
def process_symbols_archive(self, filename, processed_filename):
    """Post-process a downloaded symbols archive.

    Delegates regular symbol processing to ArtifactJob, then — only for
    the full crashreporter symbols archive — also extracts the
    gzip-compressed ELF debug symbol files into the processed archive,
    uncompressed.
    """
    ArtifactJob.process_symbols_archive(
        self, filename, processed_filename, skip_compressed=True)

    if self._symbols_archive_suffix != 'crashreporter-symbols-full.zip':
        return

    import gzip

    with JarWriter(file=processed_filename, compress_level=5) as writer:
        reader = JarReader(filename)
        # Iterate under a distinct name; the original reused `filename`
        # and clobbered the method argument.
        for entry_name in reader.entries:
            if not entry_name.endswith('.gz'):
                continue

            # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
            # into "libxul.so.dbg".
            #
            # After `settings append target.debug-file-search-paths /path/to/topobjdir/dist/crashreporter-symbols`,
            # Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
            #
            # There are other paths that will work but none seem more desireable.  See
            # https://github.com/llvm-mirror/lldb/blob/882670690ca69d9dd96b7236c620987b11894af9/source/Host/common/Symbols.cpp#L324.
            #
            # Strip only the trailing ".gz": the original used
            # str.replace, which would also rewrite any interior ".gz".
            basename = os.path.basename(entry_name)[:-len('.gz')]
            destpath = mozpath.join('crashreporter-symbols', basename)
            self.log(logging.INFO, 'artifact',
                     {'destpath': destpath},
                     'Adding uncompressed ELF debug symbol file '
                     '{destpath} to processed archive')
            writer.add(destpath.encode('utf-8'),
                       gzip.GzipFile(fileobj=reader[entry_name].uncompressed_data))
def process_tests_zip_artifact(self, filename, processed_filename):
    """Repack a tests zip into the processed archive, remapping entries
    per test_artifact_patterns and OBJDIR_TEST_FILES.

    Raises ValueError if no pattern matched anything (the upstream
    archive layout changed).
    """
    from mozbuild.action.test_archive import OBJDIR_TEST_FILES
    added_entry = False

    with JarWriter(file=processed_filename, compress_level=5) as writer:
        reader = JarReader(filename)
        # Distinct loop variable (the original reused `filename`,
        # clobbering the method argument), and .items() instead of the
        # Python-2-only .iteritems().
        for entry_name, entry in reader.entries.items():
            for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                if not mozpath.match(entry_name, pattern):
                    continue
                destpath = mozpath.relpath(entry_name, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                # Upper 16 bits of external_attr hold the Unix mode.
                mode = entry['external_attr'] >> 16
                writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)
                added_entry = True
                break

            for files_entry in OBJDIR_TEST_FILES.values():
                origin_pattern = files_entry['pattern']
                leaf_filename = entry_name
                if 'dest' in files_entry:
                    dest = files_entry['dest']
                    origin_pattern = mozpath.join(dest, origin_pattern)
                    leaf_filename = entry_name[len(dest) + 1:]
                if mozpath.match(entry_name, origin_pattern):
                    destpath = mozpath.join('..', files_entry['base'], leaf_filename)
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)

    if not added_entry:
        # Space added between the two fragments; the original produced
        # '..."{patterns}"matched...'.
        raise ValueError('Archive format changed! No pattern from "{patterns}" '
                         'matched an archive path.'.format(
                             patterns=LinuxArtifactJob.test_artifact_patterns))
def test_deflated_file_no_write(self):
    """
    Test various conditions where DeflatedFile.copy is expected not to
    write in the destination file.
    """
    src = self.tmppath("src.jar")
    dest = self.tmppath("dest")
    with JarWriter(src) as jar:
        jar.add("test", b"test")
        jar.add("test2", b"test")
        jar.add("fooo", b"fooo")
    jar = JarReader(src)
    # Initial copy
    f = DeflatedFile(jar["test"])
    f.copy(dest)

    # Ensure subsequent copies won't trigger writes
    f.copy(DestNoWrite(dest))
    # Read via a context manager so the handle is closed promptly; the
    # original leaked it and relied on GC.
    with open(dest, "rb") as fh:
        self.assertEqual(b"test", fh.read())

    # When using a different file with the same content, no copy should
    # occur
    f = DeflatedFile(jar["test2"])
    f.copy(DestNoWrite(dest))
    with open(dest, "rb") as fh:
        self.assertEqual(b"test", fh.read())

    # Double check that under conditions where a copy occurs, we would get
    # an exception.
    f = DeflatedFile(jar["fooo"])
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
def test_deflated_file(self):
    """
    Check that DeflatedFile.copy yields the proper content in the
    destination file in all situations that trigger different code paths
    (see TestFile.test_file)
    """
    src = self.tmppath("src.jar")
    dest = self.tmppath("dest")
    contents = {}
    # Hoist the alphabet out of the loop; it is loop-invariant.
    letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    with JarWriter(src) as jar:
        for content in samples:
            # Random 8-letter entry name for each sample.
            name = "".join(random.choice(letters) for i in range(8))
            jar.add(name, content, compress=True)
            contents[name] = content

    for j in JarReader(src):
        f = DeflatedFile(j)
        f.copy(dest)
        # Close the destination handle promptly instead of leaking it.
        with open(dest, "rb") as fh:
            self.assertEqual(contents[j.filename], fh.read())
def process_package_artifact(self, filename, processed_filename):
    """Repack the Linux package tarball: keep only paths matching
    package_artifact_patterns and root them under bin/."""
    matched_any = False
    patterns = self.package_artifact_patterns
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        with tarfile.open(filename) as tar:
            for path, fileobj in UnpackFinder(TarFinder(filename, tar)):
                if not any(mozpath.match(path, pattern) for pattern in patterns):
                    continue
                # We strip off the relative "firefox/" bit from the path,
                # but otherwise preserve it.
                dest = mozpath.join('bin', mozpath.relpath(path, "firefox"))
                self.log(logging.INFO, 'artifact', {'destpath': dest},
                         'Adding {destpath} to processed archive')
                writer.add(dest.encode('utf-8'), fileobj.open(),
                           mode=fileobj.mode)
                matched_any = True
    if not matched_any:
        raise ValueError(
            'Archive format changed! No pattern from "{patterns}" '
            'matched an archive path.'.format(
                patterns=LinuxArtifactJob.package_artifact_patterns))
def main(argv):
    """Produce the requested test archive as a zip file."""
    parser = argparse.ArgumentParser(description='Produce test archives')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('outputfile', help='File to write output to')
    args = parser.parse_args(argv)

    out_path = args.outputfile
    if not out_path.endswith('.zip'):
        raise Exception('expected zip output file')

    # Adjust reftest entries only if processing reftests (because it is
    # unnecessary overhead otherwise).
    if args.archive == 'reftest':
        insert_reftest_entries(ARCHIVE_FILES['reftest'])

    start = time.time()
    file_count = 0
    ensureParentDir(out_path)
    with open(out_path, 'wb') as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for path, fileobj in find_files(args.archive):
                writer.add(path.encode('utf-8'), fileobj.read(),
                           mode=fileobj.mode)
                file_count += 1

    elapsed = time.time() - start
    print('Wrote %d files in %d bytes to %s in %.2fs' % (
        file_count, os.path.getsize(out_path), os.path.basename(out_path),
        elapsed))
def test_deflated_file_no_write(self):
    '''
    Test various conditions where DeflatedFile.copy is expected not to
    write in the destination file.

    Uses bytes literals and expectations: on Python 3, open(dest, 'rb')
    yields bytes, so the original str comparisons would always fail.
    b'' literals are plain str on Python 2, so this stays compatible.
    '''
    src = self.tmppath('src.jar')
    dest = self.tmppath('dest')
    with JarWriter(src) as jar:
        jar.add('test', b'test')
        jar.add('test2', b'test')
        jar.add('fooo', b'fooo')
    jar = JarReader(src)
    # Initial copy
    f = DeflatedFile(jar['test'])
    f.copy(dest)

    # Ensure subsequent copies won't trigger writes
    f.copy(DestNoWrite(dest))
    # Close the handle promptly instead of leaking it.
    with open(dest, 'rb') as fh:
        self.assertEqual(b'test', fh.read())

    # When using a different file with the same content, no copy should
    # occur
    f = DeflatedFile(jar['test2'])
    f.copy(DestNoWrite(dest))
    with open(dest, 'rb') as fh:
        self.assertEqual(b'test', fh.read())

    # Double check that under conditions where a copy occurs, we would get
    # an exception.
    f = DeflatedFile(jar['fooo'])
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
def process_tests_artifact(self, filename, processed_filename):
    """Repack a tests archive into the processed archive, remapping
    entries per test_artifact_patterns.

    Raises ValueError if no pattern matched anything (the upstream
    archive layout changed).
    """
    added_entry = False
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        reader = JarReader(filename)
        # Distinct loop variable (the original reused `filename`,
        # clobbering the method argument), and .items() instead of the
        # Python-2-only .iteritems().
        for entry_name, entry in reader.entries.items():
            for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                if not mozpath.match(entry_name, pattern):
                    continue
                destpath = mozpath.relpath(entry_name, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(logging.INFO, 'artifact',
                         {'destpath': destpath},
                         'Adding {destpath} to processed archive')
                # Upper 16 bits of external_attr hold the Unix mode.
                mode = entry['external_attr'] >> 16
                writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)
                added_entry = True
    if not added_entry:
        # Space added between the two fragments; the original produced
        # '..."{patterns}"matched...'.
        raise ValueError(
            'Archive format changed! No pattern from "{patterns}" '
            'matched an archive path.'.format(
                patterns=LinuxArtifactJob.test_artifact_patterns))
def main(platform):
    """Build the B2G simulator add-on (.xpi) for the given platform."""
    build = MozbuildObject.from_environment()
    topsrcdir = build.topsrcdir
    distdir = build.distdir

    srcdir = os.path.join(topsrcdir, "b2g", "simulator")

    # Close the buildid.h handle instead of leaking it.  The parse
    # assumes the third space-separated token of the file is the build
    # id (presumably a '#define ... <id>' line) — TODO confirm.
    with open(os.path.join(build.topobjdir, "buildid.h")) as fh:
        app_buildid = fh.read().split(' ')[2]

    # The simulator uses a shorter version string,
    # it only keeps the major version digits A.B
    # whereas MOZ_B2G_VERSION is A.B.C.D
    b2g_version = build.config_environment.defines["MOZ_B2G_VERSION"].replace('"', '')
    version = ".".join(str(n) for n in LooseVersion(b2g_version).version[0:2])

    # Build a gaia profile specific to the simulator in order to:
    # - disable the FTU
    # - set custom prefs to enable devtools debugger server
    # - set custom settings to disable lockscreen and screen timeout
    # - only ship production apps
    gaia_path = build.config_environment.substs["GAIADIR"]
    builder = GaiaBuilder(build, gaia_path)
    builder.clean()
    env = {
        "NOFTU": "1",
        "GAIA_APP_TARGET": "production",
        "SETTINGS_PATH": os.path.join(srcdir, "custom-settings.json"),
    }
    builder.profile(env)
    builder.override_prefs(os.path.join(srcdir, "custom-prefs.js"))

    # Build the simulator addon xpi
    xpi_name = XPI_NAME % {"version": version, "platform": platform}
    xpi_path = os.path.join(distdir, xpi_name)

    update_path = "%s/%s" % (version, platform)
    update_url = UPDATE_URL % {"update_path": update_path}

    # Preprocess some files...
    manifest = os.path.join(build.topobjdir, "b2g", "simulator", "install.rdf")
    preprocess_file(os.path.join(srcdir, "install.rdf.in"),
                    manifest,
                    version,
                    app_buildid,
                    update_url)

    # Named `xpi` rather than `zip` to avoid shadowing the builtin.
    with JarWriter(xpi_path, optimize=False) as xpi:
        # Ship addon files into the .xpi
        add_file_to_zip(xpi, manifest, "install.rdf")
        add_file_to_zip(xpi, os.path.join(srcdir, "bootstrap.js"),
                        "bootstrap.js")
        add_file_to_zip(xpi, os.path.join(srcdir, "icon.png"), "icon.png")
        add_file_to_zip(xpi, os.path.join(srcdir, "icon64.png"), "icon64.png")

        # Ship b2g-desktop, but prevent its gaia profile to be shipped
        # in the xpi
        add_dir_to_zip(xpi, os.path.join(distdir, "b2g"), "b2g",
                       ("gaia",
                        "B2G.app/Contents/MacOS/gaia",
                        "B2G.app/Contents/Resources/gaia"))
        # Then ship our own gaia profile
        add_dir_to_zip(xpi, os.path.join(gaia_path, "profile"), "profile")
def process_host_bin(self, filename, processed_filename):
    """Copy a single host binary into the processed archive under
    host/bin/, stripping the leading 'HASH-' from its on-disk name."""
    with JarWriter(file=processed_filename, compress_level=5) as writer:
        # Turn 'HASH-mar.exe' into 'mar.exe'.  `filename` is a path on disk
        # without any of the path parts of the artifact, so we must inject
        # the desired `host/bin` prefix here.
        orig_basename = os.path.basename(filename).split('-', 1)[1]
        destpath = mozpath.join('host/bin', orig_basename)
        # Close the source handle when done instead of leaking it.
        # NOTE(review): assumes JarWriter.add consumes the stream during
        # the call (true of mozpack's JarWriter) — confirm if in doubt.
        with open(filename, 'rb') as fh:
            writer.add(destpath.encode('utf-8'), fh)
class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
    """Exercise JarFinder through the shared MatchTestTemplate suite."""

    def add(self, path):
        # Each entry's content is its own name, stored as bytes and
        # compressed.
        self.jar.add(path, ensure_bytes(path), compress=True)

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_jar_finder(self):
        jar_path = self.tmppath("test.jar")
        self.jar = JarWriter(file=jar_path)
        self.prepare_match_test()
        self.jar.finish()
        self.finder = JarFinder(jar_path, JarReader(file=jar_path))
        self.do_match_test()
        # Misses yield None; hits are exposed as DeflatedFile instances.
        self.assertIsNone(self.finder.get("does-not-exist"))
        self.assertIsInstance(self.finder.get("bar"), DeflatedFile)
class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
    """Exercise JarFinder through the shared MatchTestTemplate suite."""

    def add(self, path):
        # Each entry's content is simply its own name, compressed.
        self.jar.add(path, path, compress=True)

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_jar_finder(self):
        jar_path = self.tmppath('test.jar')
        self.jar = JarWriter(file=jar_path)
        self.prepare_match_test()
        self.jar.finish()
        self.finder = JarFinder(jar_path, JarReader(file=jar_path))
        self.do_match_test()
        # Misses yield None; hits are exposed as DeflatedFile instances.
        self.assertIsNone(self.finder.get('does-not-exist'))
        self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
def process_symbols_archive(self, filename, processed_filename):
    """Copy every entry of a symbols archive into the processed archive
    under a crashreporter-symbols/ prefix."""
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        reader = JarReader(filename)
        # Distinct loop variable: the original reused `filename` and
        # clobbered the method argument.
        for entry_name in reader.entries:
            destpath = mozpath.join('crashreporter-symbols', entry_name)
            self.log(logging.INFO, 'artifact',
                     {'destpath': destpath},
                     'Adding {destpath} to processed archive')
            writer.add(destpath.encode('utf-8'), reader[entry_name])
def test_jar_finder(self):
    """JarFinder over a freshly written jar: the shared match tests pass,
    misses return None, and hits are DeflatedFile instances."""
    jar_path = self.tmppath('test.jar')
    self.jar = JarWriter(file=jar_path)
    self.prepare_match_test()
    self.jar.finish()
    self.finder = JarFinder(jar_path, JarReader(file=jar_path))
    self.do_match_test()
    self.assertIsNone(self.finder.get('does-not-exist'))
    self.assertIsInstance(self.finder.get('bar'), DeflatedFile)
def test_deflated_file_open(self):
    '''
    Test whether DeflatedFile.open returns an appropriately reset file
    object.

    Joins the samples as bytes: on Python 3, jar entry streams yield
    bytes, so the original str expectation would never compare equal.
    (b'' literals are plain str on Python 2, so this stays compatible.)
    '''
    src = self.tmppath('src.jar')
    content = b''.join(samples)
    with JarWriter(src) as jar:
        jar.add('content', content)

    f = DeflatedFile(JarReader(src)['content'])
    # A partial read must not affect a subsequent open(), which yields a
    # fresh, rewound stream.
    self.assertEqual(content[:42], f.open().read(42))
    self.assertEqual(content, f.open().read())
def test_deflated_file_open(self):
    """
    Test whether DeflatedFile.open returns an appropriately reset file
    object.
    """
    jar_path = self.tmppath("src.jar")
    expected = b"".join(samples)
    with JarWriter(jar_path) as jar:
        jar.add("content", expected)

    deflated = DeflatedFile(JarReader(jar_path)["content"])
    # A partial read...
    self.assertEqual(expected[:42], deflated.open().read(42))
    # ...must not affect a subsequent open(), which yields a fresh,
    # rewound stream.
    self.assertEqual(expected, deflated.open().read())
def test_add_from_finder(self):
    """Adding a file object obtained from a FileFinder stores its content
    uncompressed under the given entry name."""
    dest = MockDest()
    with JarWriter(fileobj=dest) as jar:
        finder = FileFinder(test_data_path)
        for _, f in finder.find("test_data"):
            jar.add("test_data", f)

    entries = list(JarReader(fileobj=dest))
    entry = entries[0]
    self.assertEqual(entry.filename, "test_data")
    self.assertFalse(entry.compressed)
    self.assertEqual(entry.read(), b"test_data")
def make_archive(archive_name, base, exclude, include):
    """Write the files under `base` selected by `include` (minus
    `exclude`) into a zip archive at `archive_name`."""
    # Only symbol files are worth deflating.
    compress_patterns = ['**/*.sym']
    finder = FileFinder(base, ignore=exclude)
    if not include:
        include = ['*']
    archive_basename = os.path.basename(archive_name)
    with open(archive_name, 'wb') as fh:
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            for pat in include:
                for p, f in finder.find(pat):
                    print(' Adding to "%s":\n\t"%s"' % (archive_basename, p))
                    should_deflate = any(mozpath.match(p, sym_pat)
                                         for sym_pat in compress_patterns)
                    writer.add(p.encode('utf-8'), f, mode=f.mode,
                               compress=should_deflate,
                               skip_duplicates=True)
def test_preload(self):
    """Preloaded entries are recorded and moved to the front of the jar.

    Uses bytes payloads: on Python 3 JarWriter stores bytes, so the
    original str payloads would not round-trip.  b'' literals are plain
    str on Python 2, so this stays compatible.
    """
    s = MockDest()
    # Without preload(), nothing is recorded as preloaded.
    with JarWriter(fileobj=s) as jar:
        jar.add('foo', b'foo')
        jar.add('bar', b'abcdefghijklmnopqrstuvwxyz')
        jar.add('baz/qux', b'aaaaaaaaaaaaanopqrstuvwxyz')
    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, None)

    # With preload(), the last preloaded entry is recorded and the
    # preloaded entries come first, in preload order.
    with JarWriter(fileobj=s) as jar:
        jar.add('foo', b'foo')
        jar.add('bar', b'abcdefghijklmnopqrstuvwxyz')
        jar.add('baz/qux', b'aaaaaaaaaaaaanopqrstuvwxyz')
        jar.preload(['baz/qux', 'bar'])
    jar = JarReader(fileobj=s)
    self.assertEqual(jar.last_preloaded, 'bar')
    files = [j for j in jar]
    self.assertEqual(files[0].filename, 'baz/qux')
    self.assertEqual(files[1].filename, 'bar')
    self.assertEqual(files[2].filename, 'foo')
def process_symbols_archive(self, filename, processed_filename,
                            skip_compressed=False):
    """Copy symbol archive entries into the processed archive under a
    crashreporter-symbols/ prefix, optionally skipping .gz entries."""
    with JarWriter(file=processed_filename, compress_level=5) as writer:
        reader = JarReader(filename)
        # Distinct loop variable: the original reused `filename` and
        # clobbered the method argument.
        for entry_name in reader.entries:
            if skip_compressed and entry_name.endswith('.gz'):
                self.log(logging.INFO, 'artifact',
                         {'filename': entry_name},
                         'Skipping compressed ELF debug symbol file (unknown)')
                continue
            destpath = mozpath.join('crashreporter-symbols', entry_name)
            self.log(logging.INFO, 'artifact',
                     {'destpath': destpath},
                     'Adding {destpath} to processed archive')
            writer.add(destpath.encode('utf-8'), reader[entry_name])
def test_preload(self):
    """Preloaded entries are recorded and moved to the front of the jar."""
    dest = MockDest()
    payloads = [
        ("foo", b"foo"),
        ("bar", b"abcdefghijklmnopqrstuvwxyz"),
        ("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz"),
    ]

    # Without preload(), nothing is recorded as preloaded.
    with JarWriter(fileobj=dest) as writer:
        for name, data in payloads:
            writer.add(name, data)
    self.assertEqual(JarReader(fileobj=dest).last_preloaded, None)

    # With preload(), the last preloaded entry is recorded and the
    # preloaded entries come first, in preload order.
    with JarWriter(fileobj=dest) as writer:
        for name, data in payloads:
            writer.add(name, data)
        writer.preload(["baz/qux", "bar"])
    reader = JarReader(fileobj=dest)
    self.assertEqual(reader.last_preloaded, "bar")
    self.assertEqual([entry.filename for entry in reader],
                     ["baz/qux", "bar", "foo"])
def process_artifact(self, filename, processed_filename):
    """Copy the shared libraries (plus platform.ini/application.ini)
    from an artifact archive into bin/ of the processed archive."""
    # Extract all .so files into the root, which will get copied into
    # dist/bin.
    keep = ('platform.ini', 'application.ini')
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for entry in JarReader(filename):
            wanted = entry.filename.endswith('.so') or entry.filename in keep
            if not wanted:
                continue
            basename = os.path.basename(entry.filename)
            self.log(logging.INFO, 'artifact', {'basename': basename},
                     'Adding {basename} to processed archive')
            destpath = mozpath.join('bin', basename)
            writer.add(destpath.encode('utf-8'), entry)
def process_package_artifact(self, filename, processed_filename):
    """Repack an Android package: matched .so files go to bin/; other
    matched files keep their directory under bin/, minus any leading
    assets/ component."""
    # Extract all .so files into the root, which will get copied into
    # dist/bin.
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
            if not any(mozpath.match(p, pat)
                       for pat in self.package_artifact_patterns):
                continue
            dirname, basename = os.path.split(p)
            self.log(logging.INFO, 'artifact', {'basename': basename},
                     'Adding {basename} to processed archive')
            basedir = 'bin'
            if not basename.endswith('.so'):
                # Strip an 'assets/' *prefix* if present.  The original
                # used dirname.lstrip('assets/'), which strips a leading
                # run of the characters {a,s,e,t,/} — e.g. it would also
                # eat the leading 'tes' of 'assets/tests/...'.
                if dirname.startswith('assets/'):
                    dirname = dirname[len('assets/'):]
                basedir = mozpath.join('bin', dirname)
            basename = mozpath.join(basedir, basename)
            writer.add(basename.encode('utf-8'), f.open())
def process_package_artifact(self, filename, processed_filename):
    """Repack a Firefox package archive: keep only paths matching
    package_artifact_patterns and root them under bin/.

    Raises ValueError if nothing matched (archive layout changed).
    """
    added_entry = False
    with JarWriter(file=processed_filename, optimize=False,
                   compress_level=5) as writer:
        for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
            if not any(mozpath.match(p, pat)
                       for pat in self.package_artifact_patterns):
                continue
            # strip off the relative "firefox/" bit from the path:
            basename = mozpath.relpath(p, "firefox")
            basename = mozpath.join('bin', basename)
            self.log(logging.INFO, 'artifact', {'basename': basename},
                     'Adding {basename} to processed archive')
            writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
            added_entry = True
    if not added_entry:
        # Space added between the two fragments; the original produced
        # '..."{patterns}"matched...'.
        raise ValueError('Archive format changed! No pattern from "{patterns}" '
                         'matched an archive path.'.format(
                             patterns=self.artifact_patterns))
def main(argv):
    """Produce the requested test archive as a tar.gz or zip file."""
    parser = argparse.ArgumentParser(description='Produce test archives')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('outputfile', help='File to write output to')
    args = parser.parse_args(argv)

    out_file = args.outputfile
    if not out_file.endswith(('.tar.gz', '.zip')):
        raise Exception('expected tar.gz or zip output file')

    start = time.time()
    file_count = 0
    ensureParentDir(out_file)
    res = find_files(args.archive)
    with open(out_file, 'wb') as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        if out_file.endswith('.tar.gz'):
            files = dict(res)
            create_tar_gz_from_files(fh, files, compresslevel=5)
            file_count = len(files)
        elif out_file.endswith('.zip'):
            with JarWriter(fileobj=fh, optimize=False,
                           compress_level=5) as writer:
                for p, f in res:
                    writer.add(p.encode('utf-8'), f.read(), mode=f.mode,
                               skip_duplicates=True)
                    file_count += 1
        else:
            # Unreachable given the extension check above; kept as a guard.
            raise Exception('unhandled file extension: %s' % out_file)

    elapsed = time.time() - start
    print('Wrote %d files in %d bytes to %s in %.2fs' % (
        file_count, os.path.getsize(args.outputfile),
        os.path.basename(args.outputfile), elapsed))
def test_deflated_file(self):
    '''
    Check that DeflatedFile.copy yields the proper content in the
    destination file in all situations that trigger different code paths
    (see TestFile.test_file)
    '''
    src = self.tmppath('src.jar')
    dest = self.tmppath('dest')
    contents = {}
    with JarWriter(src) as jar:
        for content in samples:
            # string.ascii_letters and range() work on both Python 2
            # and 3; the original string.letters and xrange() are
            # Python-2-only.
            name = ''.join(random.choice(string.ascii_letters)
                           for i in range(8))
            jar.add(name, content, compress=True)
            contents[name] = content

    for j in JarReader(src):
        f = DeflatedFile(j)
        f.copy(dest)
        # Close the handle promptly instead of leaking it.
        with open(dest, 'rb') as fh:
            self.assertEqual(contents[j.filename], fh.read())
def write_zip(zip_path, prefix=None):
    """Write toolchain data to a zip file.

    Args:
        zip_path: path of the zip archive to create.
        prefix: optional path prefix (str or bytes) prepended to every
            entry in the archive.
    """
    # Only coerce when a prefix was given: six.ensure_binary(None)
    # raises TypeError, which broke the default no-prefix call.
    if prefix is not None:
        prefix = six.ensure_binary(prefix, encoding="utf-8")

    # `writer` rather than `zip` to avoid shadowing the builtin.
    with JarWriter(file=zip_path, compress_level=5) as writer:
        manifest = {}
        for p, data, mode in resolve_files_and_hash(manifest):
            print(p)
            if prefix:
                p = mozpath.join(prefix, p)
            writer.add(p, data, mode=mode)

        sha256_manifest = format_manifest(manifest)

        sdk_path = b"SDK_VERSION"
        sha256_path = b"MANIFEST.SHA256"
        if prefix:
            sdk_path = mozpath.join(prefix, sdk_path)
            sha256_path = mozpath.join(prefix, sha256_path)

        writer.add(sdk_path, SDK_RELEASE.encode("utf-8"))
        writer.add(sha256_path, sha256_manifest)