def test_faster_recursive_make(self):
    substs = list(BASE_SUBSTS) + [
        ("BUILD_BACKENDS", "FasterMake+RecursiveMake"),
    ]
    with self.do_test_backend(
        get_backend_class("FasterMake+RecursiveMake"), substs=substs
    ) as config:
        buildid = mozpath.join(config.topobjdir, "config", "buildid")
        ensureParentDir(buildid)
        with open(buildid, "w") as fh:
            fh.write("20100101012345\n")

        build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
        build._config_environment = config
        overrides = [
            "install_manifest_depends=",
            "MOZ_JAR_MAKER_FILE_FORMAT=flat",
            "TEST_MOZBUILD=1",
        ]
        with self.line_handler() as handle_make_line:
            build._run_make(
                directory=config.topobjdir,
                target=overrides,
                silent=False,
                line_handler=handle_make_line,
            )

        self.validate(config)
def main(argv):
    parser = argparse.ArgumentParser(description='Produce test archives')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('outputfile', help='File to write output to')

    args = parser.parse_args(argv)

    if not args.outputfile.endswith('.zip'):
        raise Exception('expected zip output file')

    file_count = 0
    t_start = time.time()
    ensureParentDir(args.outputfile)
    with open(args.outputfile, 'wb') as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            res = find_files(args.archive)
            for p, f in res:
                writer.add(p.encode('utf-8'), f.read(), mode=f.mode,
                           skip_duplicates=True)
                file_count += 1

    duration = time.time() - t_start
    zip_size = os.path.getsize(args.outputfile)
    basename = os.path.basename(args.outputfile)
    print('Wrote %d files in %d bytes to %s in %.2fs' % (
        file_count, zip_size, basename, duration))
def explode(aar, destdir):
    # Take just the support-v4-22.2.1 part.
    name, _ = os.path.splitext(os.path.basename(aar))
    destdir = mozpath.join(destdir, name)
    if os.path.exists(destdir):
        # We always want to start fresh.
        shutil.rmtree(destdir)
    ensureParentDir(destdir)
    with zipfile.ZipFile(aar) as zf:
        zf.extractall(destdir)

    # classes.jar is always present. However, multiple JAR files with the same
    # name confuse our staged Proguard process in
    # mobile/android/base/Makefile.in, so we make the names unique here.
    classes_jar = mozpath.join(destdir, name + '-classes.jar')
    os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)

    # Embedded JAR libraries are optional.
    finder = FileFinder(mozpath.join(destdir, 'libs'))
    for p, _ in finder.find('*.jar'):
        jar = mozpath.join(finder.base, name + '-' + p)
        os.rename(mozpath.join(finder.base, p), jar)

    # Frequently assets/ is present but empty. Protect against meaningless
    # changes to the AAR files by deleting empty assets/ directories.
    assets = mozpath.join(destdir, 'assets')
    try:
        os.rmdir(assets)
    except OSError as e:
        if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
            pass
        else:
            raise
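# Usage sketch for explode; the AAR path and destination below are
# hypothetical. The function leaves an exploded tree under destdir/<name>:
explode('/tmp/downloads/support-v4-22.2.1.aar', '/tmp/exploded-aars')
# Resulting layout (illustrative):
#   /tmp/exploded-aars/support-v4-22.2.1/
#       support-v4-22.2.1-classes.jar     (renamed from classes.jar)
#       libs/support-v4-22.2.1-<lib>.jar  (renamed embedded JARs, if any)
#       res/, AndroidManifest.xml, ...    (extracted as-is)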
def main(argv):
    parser = argparse.ArgumentParser(description='Produce test archives')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('outputfile', help='File to write output to')

    args = parser.parse_args(argv)

    if not args.outputfile.endswith('.zip'):
        raise Exception('expected zip output file')

    # Adjust reftest entries only if processing reftests (because it is
    # unnecessary overhead otherwise).
    if args.archive == 'reftest':
        insert_reftest_entries(ARCHIVE_FILES['reftest'])

    file_count = 0
    t_start = time.time()
    ensureParentDir(args.outputfile)
    with open(args.outputfile, 'wb') as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
            res = find_files(args.archive)
            for p, f in res:
                file_count += 1
                writer.add(p.encode('utf-8'), f.read(), mode=f.mode)

    duration = time.time() - t_start
    zip_size = os.path.getsize(args.outputfile)
    basename = os.path.basename(args.outputfile)
    print('Wrote %d files in %d bytes to %s in %.2fs' % (
        file_count, zip_size, basename, duration))
def repackage_installer(topsrcdir, tag, setupexe, package, output):
    if package and not zipfile.is_zipfile(package):
        raise Exception("Package file %s is not a valid .zip file." % package)

    # We need the full path for the tag and output, since we chdir later.
    tag = mozpath.realpath(tag)
    output = mozpath.realpath(output)
    ensureParentDir(output)

    tmpdir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    try:
        if package:
            z = zipfile.ZipFile(package)
            z.extractall(tmpdir)
            z.close()

        # Copy setup.exe into the root of the install dir, alongside the
        # package.
        shutil.copyfile(setupexe, mozpath.join(tmpdir, mozpath.basename(setupexe)))

        # archive_exe requires us to be in the directory where the package is
        # unpacked (the tmpdir).
        os.chdir(tmpdir)

        sfx_package = mozpath.join(topsrcdir, 'other-licenses/7zstub/firefox/7zSD.sfx')

        package_name = 'firefox' if package else None
        archive_exe(package_name, tag, sfx_package, output)
    finally:
        os.chdir(old_cwd)
        shutil.rmtree(tmpdir)
def updateManifest(self, manifestPath, chromebasepath, register):
    '''updateManifest replaces the % in the chrome registration entries
    with the given chrome base path, and updates the given manifest file.
    '''
    ensureParentDir(manifestPath)
    lock = lock_file(manifestPath + '.lck')
    try:
        myregister = dict.fromkeys(
            map(lambda s: s.replace('%', chromebasepath), register))
        manifestExists = os.path.isfile(manifestPath)
        mode = manifestExists and 'r+b' or 'wb'
        mf = open(manifestPath, mode)
        if manifestExists:
            # import previous content into hash, ignoring empty ones and comments
            imf = re.compile('(#.*)?$')
            for l in re.split('[\r\n]+', mf.read()):
                if imf.match(l):
                    continue
                myregister[l] = None
            mf.seek(0)
        for k in sorted(myregister.iterkeys()):
            mf.write(k + os.linesep)
        mf.close()
    finally:
        lock = None
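# A sketch of the '%' substitution above; the entries, base path, and the
# `jarmaker` instance are hypothetical:
register = ['content browser %content/browser/',
            'locale browser en-US %locale/en-US/browser/']
# With chromebasepath 'jar:omni.ja!/chrome/', each '%' expands so the
# manifest gains lines like:
#   content browser jar:omni.ja!/chrome/content/browser/
jarmaker.updateManifest('/tmp/objdir/chrome.manifest',
                        'jar:omni.ja!/chrome/', register)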
def explode(aar, destdir):
    # Take just the support-v4-22.2.1 part.
    name, _ = os.path.splitext(os.path.basename(aar))
    destdir = mozpath.join(destdir, name)
    if os.path.exists(destdir):
        # We always want to start fresh.
        shutil.rmtree(destdir)
    ensureParentDir(destdir)
    with zipfile.ZipFile(aar) as zf:
        zf.extractall(destdir)

    # classes.jar is always present. However, multiple JAR files with the same
    # name confuse our staged Proguard process in
    # mobile/android/base/Makefile.in, so we make the names unique here.
    classes_jar = mozpath.join(destdir, name + '-classes.jar')
    os.rename(mozpath.join(destdir, 'classes.jar'), classes_jar)

    # Embedded JAR libraries are optional.
    finder = FileFinder(mozpath.join(destdir, 'libs'), find_executables=False)
    for p, _ in finder.find('*.jar'):
        jar = mozpath.join(finder.base, name + '-' + p)
        os.rename(mozpath.join(finder.base, p), jar)

    # Frequently assets/ is present but empty. Protect against meaningless
    # changes to the AAR files by deleting empty assets/ directories.
    assets = mozpath.join(destdir, 'assets')
    try:
        os.rmdir(assets)
    except OSError as e:
        if e.errno in (errno.ENOTEMPTY, errno.ENOENT):
            pass
        else:
            raise
def test_faster_recursive_make(self):
    substs = list(BASE_SUBSTS) + [
        ('BUILD_BACKENDS', 'FasterMake+RecursiveMake'),
    ]
    with self.do_test_backend(
            get_backend_class('FasterMake+RecursiveMake'),
            substs=substs) as config:
        buildid = mozpath.join(config.topobjdir, 'config', 'buildid')
        ensureParentDir(buildid)
        with open(buildid, 'w') as fh:
            fh.write('20100101012345\n')

        build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
        overrides = [
            'install_manifest_depends=',
            'MOZ_JAR_MAKER_FILE_FORMAT=flat',
            'TEST_MOZBUILD=1',
        ]
        with self.line_handler() as handle_make_line:
            build._run_make(directory=config.topobjdir, target=overrides,
                            silent=False, line_handler=handle_make_line)

        self.validate(config)
def test_faster_make(self):
    substs = list(BASE_SUBSTS) + [
        ('MOZ_BUILD_APP', 'dummy_app'),
        ('MOZ_WIDGET_TOOLKIT', 'dummy_widget'),
    ]
    with self.do_test_backend(RecursiveMakeBackend, FasterMakeBackend,
                              substs=substs) as config:
        buildid = mozpath.join(config.topobjdir, 'config', 'buildid')
        ensureParentDir(buildid)
        with open(buildid, 'w') as fh:
            fh.write('20100101012345\n')

        build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
        overrides = [
            'TEST_MOZBUILD=1',
        ]
        with self.line_handler() as handle_make_line:
            build._run_make(directory=mozpath.join(config.topobjdir, 'faster'),
                            target=overrides, silent=False,
                            line_handler=handle_make_line)

        self.validate(config)
def updateManifest(self, manifestPath, chromebasepath, register):
    '''updateManifest replaces the % in the chrome registration entries
    with the given chrome base path, and updates the given manifest file.
    '''
    ensureParentDir(manifestPath)
    lock = lock_file(manifestPath + '.lck')
    try:
        myregister = dict.fromkeys(map(lambda s: s.replace('%', chromebasepath),
                                       register.iterkeys()))
        manifestExists = os.path.isfile(manifestPath)
        mode = manifestExists and 'r+b' or 'wb'
        mf = open(manifestPath, mode)
        if manifestExists:
            # import previous content into hash, ignoring empty ones and comments
            imf = re.compile('(#.*)?$')
            for l in re.split('[\r\n]+', mf.read()):
                if imf.match(l):
                    continue
                myregister[l] = None
            mf.seek(0)
        for k in sorted(myregister.iterkeys()):
            mf.write(k + os.linesep)
        mf.close()
    finally:
        lock = None
def create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name):
    'Given a prepared directory stagedir, produce a DMG at output_dmg.'
    if not is_linux:
        # Running on OS X
        hybrid = os.path.join(tmpdir, 'hybrid.dmg')
        subprocess.check_call(['hdiutil', 'makehybrid', '-hfs',
                               '-hfs-volume-name', volume_name,
                               '-hfs-openfolder', stagedir,
                               '-ov', stagedir,
                               '-o', hybrid])
        subprocess.check_call(['hdiutil', 'convert', '-format', 'UDBZ',
                               '-imagekey', 'bzip2-level=9',
                               '-ov', hybrid, '-o', output_dmg])
    else:
        # The dmg tool doesn't create the destination directories, and silently
        # returns success if the parent directory doesn't exist.
        ensureParentDir(output_dmg)

        hfs = os.path.join(tmpdir, 'staged.hfs')
        subprocess.check_call([
            buildconfig.substs['HFS_TOOL'], hfs, 'addall', stagedir])
        subprocess.check_call([
            buildconfig.substs['DMG_TOOL'],
            'build',
            hfs,
            output_dmg
        ],
            # dmg is seriously chatty
            stdout=open(os.devnull, 'wb'))
def repackage_mar(
    topsrcdir, package, mar, output, mar_format="lzma", arch=None, mar_channel_id=None
):
    if not zipfile.is_zipfile(package) and not tarfile.is_tarfile(package):
        raise Exception("Package file %s is not a valid .zip or .tar file." % package)
    if arch and arch not in _BCJ_OPTIONS:
        raise Exception("Unknown architecture {}, available architectures: {}".format(
            arch, list(_BCJ_OPTIONS.keys())))

    ensureParentDir(output)
    tmpdir = tempfile.mkdtemp()
    try:
        if tarfile.is_tarfile(package):
            z = tarfile.open(package)
            z.extractall(tmpdir)
            filelist = z.getnames()
            z.close()
        else:
            z = zipfile.ZipFile(package)
            z.extractall(tmpdir)
            filelist = z.namelist()
            z.close()

        toplevel_dirs = set([mozpath.split(f)[0] for f in filelist])
        excluded_stuff = set([' ', '.background', '.DS_Store', '.VolumeIcon.icns'])
        toplevel_dirs = toplevel_dirs - excluded_stuff
        # Make sure the .zip file just contains a directory like 'firefox/' at
        # the top, and find out what it is called.
        if len(toplevel_dirs) != 1:
            raise Exception("Package file is expected to have a single top-level "
                            "directory (eg: 'firefox'), not: %s" % toplevel_dirs)

        ffxdir = mozpath.join(tmpdir, toplevel_dirs.pop())

        make_full_update = mozpath.join(
            topsrcdir, 'tools/update-packaging/make_full_update.sh')

        env = os.environ.copy()
        env['MOZ_PRODUCT_VERSION'] = get_application_ini_value(tmpdir, 'App', 'Version')
        env['MAR'] = mozpath.normpath(mar)
        if arch:
            env['BCJ_OPTIONS'] = ' '.join(_BCJ_OPTIONS[arch])
        if mar_format == 'bz2':
            env['MAR_OLD_FORMAT'] = '1'
        if mar_channel_id:
            env['MAR_CHANNEL_ID'] = mar_channel_id
        # The Windows build systems have xz installed but it isn't in the path
        # like it is on Linux and Mac OS X, so just use the XZ env var so the
        # mar generation scripts can find it.
        xz_path = mozpath.join(topsrcdir, 'xz/xz.exe')
        if os.path.exists(xz_path):
            env['XZ'] = mozpath.normpath(xz_path)

        cmd = [make_full_update, output, ffxdir]
        if sys.platform == 'win32':
            # make_full_update.sh is a bash script, and Windows needs to
            # explicitly call out the shell to execute the script from Python.
            cmd.insert(0, env['MOZILLABUILD'] + '/msys/bin/bash.exe')
        subprocess.check_call(cmd, env=ensure_subprocess_env(env))
    finally:
        shutil.rmtree(tmpdir)
def test_faster_make(self):
    substs = list(BASE_SUBSTS) + [
        ("MOZ_BUILD_APP", "dummy_app"),
        ("MOZ_WIDGET_TOOLKIT", "dummy_widget"),
    ]
    with self.do_test_backend(
        RecursiveMakeBackend, FasterMakeBackend, substs=substs
    ) as config:
        buildid = mozpath.join(config.topobjdir, "config", "buildid")
        ensureParentDir(buildid)
        with open(buildid, "w") as fh:
            fh.write("20100101012345\n")

        build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
        build._config_environment = config
        overrides = [
            "TEST_MOZBUILD=1",
        ]
        with self.line_handler() as handle_make_line:
            build._run_make(
                directory=mozpath.join(config.topobjdir, "faster"),
                target=overrides,
                silent=False,
                line_handler=handle_make_line,
            )

        self.validate(config)
def main(argv):
    parser = argparse.ArgumentParser(description="Merge l10n files.")
    parser.add_argument("--output", help="Path to write merged output")
    parser.add_argument("--ref-file", help="Path to reference file (en-US)")
    parser.add_argument("--l10n-file", help="Path to locale file")

    args = parser.parse_args(argv)

    from compare_locales.compare import (
        ContentComparer,
        Observer,
    )
    from compare_locales.paths import File

    cc = ContentComparer([Observer()])
    cc.compare(
        File(args.ref_file, args.ref_file, ""),
        File(args.l10n_file, args.l10n_file, ""),
        args.output,
    )

    ensureParentDir(args.output)
    if not os.path.exists(args.output):
        src = args.l10n_file
        if not os.path.exists(args.l10n_file):
            src = args.ref_file
        shutil.copy(src, args.output)

    return 0
def dump_cache(self):
    if self._skip_cache:
        self.log(logging.DEBUG, "artifact", {},
                 "Skipping cache: ignoring dump_cache!")
        return

    ensureParentDir(self._cache_filename)
    pickle.dump(list(reversed(list(self._cache.items()))),
                open(self._cache_filename, "wb"), -1)
def repackage_mar(topsrcdir, package, mar, output):
    if not zipfile.is_zipfile(package):
        raise Exception("Package file %s is not a valid .zip file." % package)

    ensureParentDir(output)
    tmpdir = tempfile.mkdtemp()
    try:
        z = zipfile.ZipFile(package)
        z.extractall(tmpdir)
        filelist = z.namelist()
        z.close()

        # Make sure the .zip file just contains a directory like 'firefox/' at
        # the top, and find out what it is called.
        toplevel_dirs = set([mozpath.split(f)[0] for f in filelist])
        if len(toplevel_dirs) != 1:
            raise Exception("Package file is expected to have a single top-level "
                            "directory (eg: 'firefox'), not: %s" % toplevel_dirs)

        ffxdir = mozpath.join(tmpdir, toplevel_dirs.pop())

        make_full_update = mozpath.join(
            topsrcdir, 'tools/update-packaging/make_full_update.sh')

        env = os.environ.copy()
        env['MOZ_FULL_PRODUCT_VERSION'] = get_application_ini_value(
            tmpdir, 'App', 'Version')
        env['MAR'] = mozpath.normpath(mar)

        cmd = [make_full_update, output, ffxdir]
        if sys.platform == 'win32':
            # make_full_update.sh is a bash script, and Windows needs to
            # explicitly call out the shell to execute the script from Python.
            cmd.insert(0, env['MOZILLABUILD'] + '/msys/bin/bash.exe')
        subprocess.check_call(cmd, env=env)
    finally:
        shutil.rmtree(tmpdir)
def dump_cache(self):
    if self._skip_cache:
        self.log(logging.DEBUG, 'artifact', {},
                 'Skipping cache: ignoring dump_cache!')
        return

    ensureParentDir(self._cache_filename)
    pickle.dump(list(reversed(list(self._cache.items()))),
                open(self._cache_filename, 'wb'), -1)
def install_from_file(self, filename, distdir): self.log(logging.INFO, "artifact", {"filename": filename}, "Installing from {filename}") # Do we need to post-process? processed_filename = filename + PROCESSED_SUFFIX if self._skip_cache and os.path.exists(processed_filename): self.log( logging.DEBUG, "artifact", {"path": processed_filename}, "Skipping cache: removing cached processed artifact {path}", ) os.remove(processed_filename) if not os.path.exists(processed_filename): self.log(logging.INFO, "artifact", {"filename": filename}, "Processing contents of {filename}") self.log( logging.INFO, "artifact", {"processed_filename": processed_filename}, "Writing processed {processed_filename}", ) self._artifact_job.process_artifact(filename, processed_filename) self.log( logging.INFO, "artifact", {"processed_filename": processed_filename}, "Installing from processed {processed_filename}", ) # Copy all .so files, avoiding modification where possible. ensureParentDir(mozpath.join(distdir, ".dummy")) with zipfile.ZipFile(processed_filename) as zf: for info in zf.infolist(): if info.filename.endswith(".ini"): continue n = mozpath.join(distdir, info.filename) fh = FileAvoidWrite(n, mode="rb") shutil.copyfileobj(zf.open(info), fh) file_existed, file_updated = fh.close() self.log( logging.INFO, "artifact", {"updating": "Updating" if file_updated else "Not updating", "filename": n}, "{updating} {filename}", ) if not file_existed or file_updated: # Libraries and binaries may need to be marked executable, # depending on platform. perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689. perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r. os.chmod(n, perms) return 0
def add(self, path, content=None):
    # Put foo/qux files under $tmp/b.
    if path.startswith('foo/qux/'):
        real_path = mozpath.join('b', path[8:])
    else:
        real_path = mozpath.join('a', path)
    ensureParentDir(self.tmppath(real_path))
    if not content:
        content = path
    open(self.tmppath(real_path), 'wb').write(content)
def add(self, path, content=None):
    # Put foo/qux files under $tmp/b.
    if path.startswith("foo/qux/"):
        real_path = mozpath.join("b", path[8:])
    else:
        real_path = mozpath.join("a", path)
    ensureParentDir(self.tmppath(real_path))
    if not content:
        content = six.ensure_binary(path)
    open(self.tmppath(real_path), "wb").write(content)
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines["ALLDEFINES"])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get("MOZ_DEBUG"):
        pp.context["DEBUG"] = "1"

    if buildconfig.substs.get("CPU_ARCH") == "aarch64":
        pp.context["MOZ_AARCH64"] = True

    pp.out = StringIO()
    pp.do_filter("substitution")
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        input_file = os.path.relpath(
            pref_list_filename,
            os.environ.get("GECKO_PATH", os.environ.get("TOPSRCDIR")),
        )
        code = generate_code(pref_list, input_file)
    except (IOError, ValueError) as e:
        print("{}: error:\n {}\n".format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats
    # the first named output file (StaticPrefListAll.h in this case)
    # specially -- it is created elsewhere, and written to via `fd`.
    fd.write(code["static_pref_list_all_h"])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite("StaticPrefsAll.h") as fd:
        fd.write(code["static_prefs_all_h"])

    for group, text in sorted(code["static_pref_list_group_h"].items()):
        filename = "StaticPrefList_{}.h".format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code["static_prefs_group_h"].items()):
        filename = "StaticPrefs_{}.h".format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname, "StaticPrefsCGetters.cpp")) as fd:
        fd.write(code["static_prefs_c_getters_cpp"])

    with FileAvoidWrite("static_prefs.rs") as fd:
        fd.write(code["static_prefs_rs"])
def test_composed_finder(self):
    self.prepare_match_test()
    # Also add files in $tmp/a/foo/qux because ComposedFinder is
    # expected to mask foo/qux entirely with content from $tmp/b.
    ensureParentDir(self.tmppath('a/foo/qux/hoge'))
    open(self.tmppath('a/foo/qux/hoge'), 'wb').write('hoge')
    open(self.tmppath('a/foo/qux/bar'), 'wb').write('not the right content')
    self.finder = ComposedFinder({
        '': FileFinder(self.tmppath('a')),
        'foo/qux': FileFinder(self.tmppath('b')),
    })
    self.do_match_test()
def gradle_lock(topobjdir, max_wait_seconds=600):
    # Building the same Gradle root project with multiple concurrent processes
    # is not well supported, so we use a simple lock file to serialize build
    # steps.
    lock_path = '{}/gradle/mach_android.lockfile'.format(topobjdir)
    ensureParentDir(lock_path)
    lock_instance = lock_file(lock_path, max_wait=max_wait_seconds)
    try:
        yield
    finally:
        del lock_instance
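# gradle_lock is a generator that yields once inside try/finally, the shape
# contextlib.contextmanager expects; assuming it is decorated that way in its
# defining module, a usage sketch (the objdir and Gradle step are illustrative):
import subprocess

with gradle_lock('/path/to/objdir', max_wait_seconds=600):
    # Only one Gradle invocation runs at a time for this objdir; the lock
    # file is released when the block exits, even on failure.
    subprocess.check_call(['./gradlew', 'assembleDebug'])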
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact', {'filename': filename},
             'Installing from {filename}')

    # Do we need to post-process?
    processed_filename = filename + PROCESSED_SUFFIX

    if self._skip_cache and os.path.exists(processed_filename):
        self.log(logging.DEBUG, 'artifact', {'path': processed_filename},
                 'Skipping cache: removing cached processed artifact {path}')
        os.remove(processed_filename)

    if not os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact', {'filename': filename},
                 'Processing contents of {filename}')
        self.log(logging.INFO, 'artifact', {'processed_filename': processed_filename},
                 'Writing processed {processed_filename}')
        self._artifact_job.process_artifact(filename, processed_filename)

    self._artifact_cache._persist_limit.register_file(processed_filename)

    self.log(logging.INFO, 'artifact', {'processed_filename': processed_filename},
             'Installing from processed {processed_filename}')

    # Copy all .so files, avoiding modification where possible.
    ensureParentDir(mozpath.join(distdir, '.dummy'))

    with zipfile.ZipFile(processed_filename) as zf:
        for info in zf.infolist():
            if info.filename.endswith('.ini'):
                continue
            n = mozpath.join(distdir, info.filename)
            fh = FileAvoidWrite(n, mode='rb')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
            if not file_existed or file_updated:
                # Libraries and binaries may need to be marked executable,
                # depending on platform.
                perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                os.chmod(n, perms)
    return 0
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact', {'filename': filename},
             'Installing from {filename}')

    # Do we need to post-process?
    processed_filename = filename + PROCESSED_SUFFIX

    if self._skip_cache and os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact', {'path': processed_filename},
                 'Skipping cache: removing cached processed artifact {path}')
        os.remove(processed_filename)

    if not os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact', {'filename': filename},
                 'Processing contents of {filename}')
        self.log(logging.INFO, 'artifact', {'processed_filename': processed_filename},
                 'Writing processed {processed_filename}')
        self._artifact_job.process_artifact(filename, processed_filename)

    self._artifact_cache._persist_limit.register_file(processed_filename)

    self.log(logging.INFO, 'artifact', {'processed_filename': processed_filename},
             'Installing from processed {processed_filename}')

    # Copy all .so files, avoiding modification where possible.
    ensureParentDir(mozpath.join(distdir, '.dummy'))

    with zipfile.ZipFile(processed_filename) as zf:
        for info in zf.infolist():
            if info.filename.endswith('.ini'):
                continue
            n = mozpath.join(distdir, info.filename)
            fh = FileAvoidWrite(n, mode='rb')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
            if not file_existed or file_updated:
                # Libraries and binaries may need to be marked executable,
                # depending on platform.
                perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                os.chmod(n, perms)
    return 0
def process_package_overload(src, dst, version, app_buildid):
    ensureParentDir(dst)
    # First replace the numeric version like '1.3',
    # then the 'slashed' version like '1_3',
    # and finally set the full-length addon version like 1.3.20131230.
    defines = {
        "NUM_VERSION": version,
        "SLASH_VERSION": version.replace(".", "_"),
        "FULL_VERSION": ("%s.%s" % (version, app_buildid))
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
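# For version='1.3' and app_buildid='20131230010203', the defines above are:
#   NUM_VERSION   -> '1.3'
#   SLASH_VERSION -> '1_3'
#   FULL_VERSION  -> '1.3.20131230010203'
# and the 'substitution' filter rewrites @DEFINE@ tokens in src; e.g. a
# hypothetical template line:
#   src: <em:version>@FULL_VERSION@</em:version>
#   dst: <em:version>1.3.20131230010203</em:version>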
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'

    pp.out = BytesIO()
    pp.do_filter('substitution')
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        code = generate_code(pref_list)
    except (IOError, ValueError) as e:
        print('{}: error:\n {}\n'.format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats
    # the first named output file (StaticPrefListAll.h in this case)
    # specially -- it is created elsewhere, and written to via `fd`.
    fd.write(code['static_pref_list_all_h'])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite('StaticPrefsAll.h') as fd:
        fd.write(code['static_prefs_all_h'])

    for group, text in sorted(code['static_pref_list_group_h'].items()):
        filename = 'StaticPrefList_{}.h'.format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code['static_prefs_group_h'].items()):
        filename = 'StaticPrefs_{}.h'.format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname, 'StaticPrefsCGetters.cpp')) as fd:
        fd.write(code['static_prefs_c_getters_cpp'])

    with FileAvoidWrite('static_prefs.rs') as fd:
        fd.write(code['static_prefs_rs'])
def _generate_geckoview_classes_jar(distdir, base_path):
    base_folder = FileFinder(base_path, ignore=['gecko-R.jar'])

    # Unzip all jar files into $(DISTDIR)/geckoview_aar_classes.
    geckoview_aar_classes_path = os.path.join(distdir, 'geckoview_aar_classes')
    shutil.rmtree(geckoview_aar_classes_path, ignore_errors=True)
    util.ensureParentDir(geckoview_aar_classes_path)

    for p, f in base_folder.find('*.jar'):
        with zipfile.ZipFile(f.path) as zf:
            zf.extractall(geckoview_aar_classes_path)

    # Rezip them into a single classes.jar file.
    classes_jar_path = os.path.join(distdir, 'classes.jar')
    _zipdir(geckoview_aar_classes_path, classes_jar_path)
    return File(classes_jar_path)
def test_composed_finder(self):
    self.prepare_match_test()
    # Also add files in $tmp/a/foo/qux because ComposedFinder is
    # expected to mask foo/qux entirely with content from $tmp/b.
    ensureParentDir(self.tmppath("a/foo/qux/hoge"))
    open(self.tmppath("a/foo/qux/hoge"), "wb").write(b"hoge")
    open(self.tmppath("a/foo/qux/bar"), "wb").write(b"not the right content")
    self.finder = ComposedFinder({
        "": FileFinder(self.tmppath("a")),
        "foo/qux": FileFinder(self.tmppath("b")),
    })
    self.do_match_test()

    self.assertIsNone(self.finder.get("does-not-exist"))
    self.assertIsInstance(self.finder.get("bar"), File)
def preprocess_file(src, dst, version, app_buildid, update_url):
    ensureParentDir(dst)
    defines = {
        "ADDON_ID": "fxos_" + version.replace(".", "_") + "*****@*****.**",
        # (reduce the app build id to only the build date
        # as addon manager doesn't handle big ints in addon versions)
        "ADDON_VERSION": ("%s.%s" % (version, app_buildid[:8])),
        "ADDON_NAME": "Firefox OS " + version + " Simulator",
        "ADDON_DESCRIPTION": "a Firefox OS " + version + " simulator",
        "ADDON_UPDATE_URL": update_url
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
def create_dmg_from_staged(stagedir, output_dmg, tmpdir, volume_name): "Given a prepared directory stagedir, produce a DMG at output_dmg." if not is_linux: # Running on OS X hybrid = os.path.join(tmpdir, "hybrid.dmg") subprocess.check_call([ "hdiutil", "makehybrid", "-hfs", "-hfs-volume-name", volume_name, "-hfs-openfolder", stagedir, "-ov", stagedir, "-o", hybrid, ]) subprocess.check_call([ "hdiutil", "convert", "-format", "UDBZ", "-imagekey", "bzip2-level=9", "-ov", hybrid, "-o", output_dmg, ]) else: # The dmg tool doesn't create the destination directories, and silently # returns success if the parent directory doesn't exist. ensureParentDir(output_dmg) hfs = os.path.join(tmpdir, "staged.hfs") subprocess.check_call( [buildconfig.substs["HFS_TOOL"], hfs, "addall", stagedir]) subprocess.check_call( [buildconfig.substs["DMG_TOOL"], "build", hfs, output_dmg], # dmg is seriously chatty stdout=open(os.devnull, "wb"), )
def android(verb, *args):
    # Building the same Gradle root project with multiple concurrent processes
    # is not well supported, so we use a simple lock file to serialize build
    # steps.
    lock_path = '{}/gradle/mach_android.lockfile'.format(buildconfig.topobjdir)
    ensureParentDir(lock_path)
    lock_instance = lock_file(lock_path)

    try:
        cmd = [
            mozpath.join(buildconfig.topsrcdir, 'mach'),
            'android',
            verb,
        ]
        cmd.extend(args)
        subprocess.check_call(cmd)
        return 0
    finally:
        del lock_instance
def addEntriesToListFile(listFile, entries): """Given a file |listFile| containing one entry per line, add each entry in |entries| to the file, unless it is already present.""" ensureParentDir(listFile) lock = lock_file(listFile + ".lck") try: if os.path.exists(listFile): f = open(listFile) existing = set(x.strip() for x in f.readlines()) f.close() else: existing = set() for e in entries: if e not in existing: existing.add(e) with open(listFile, 'wb') as f: f.write("\n".join(sorted(existing))+"\n") finally: lock = None
def addEntriesToListFile(listFile, entries): """Given a file |listFile| containing one entry per line, add each entry in |entries| to the file, unless it is already present.""" ensureParentDir(listFile) lock = lock_file(listFile + '.lck') try: if os.path.exists(listFile): f = io.open(listFile) existing = set(x.strip() for x in f.readlines()) f.close() else: existing = set() for e in entries: if e not in existing: existing.add(e) with io.open(listFile, 'w', newline='\n') as f: f.write('\n'.join(sorted(existing)) + '\n') finally: del lock # Explicitly release the lock_file to free it
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact', {'filename': filename},
             'Installing from {filename}')

    # Copy all .so files to dist/bin, avoiding modification where possible.
    ensureParentDir(os.path.join(distdir, 'bin', '.dummy'))

    with zipfile.ZipFile(filename) as zf:
        for info in zf.infolist():
            if not info.filename.endswith('.so'):
                continue
            n = os.path.join(distdir, 'bin', os.path.basename(info.filename))
            fh = FileAvoidWrite(n, mode='r')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
    return 0
def main(argv):
    parser = argparse.ArgumentParser(description='Produce test archives')
    parser.add_argument('archive', help='Which archive to generate')
    parser.add_argument('outputfile', help='File to write output to')

    args = parser.parse_args(argv)

    out_file = args.outputfile
    if not out_file.endswith(('.tar.gz', '.zip')):
        raise Exception('expected tar.gz or zip output file')

    file_count = 0
    t_start = time.time()
    ensureParentDir(out_file)
    res = find_files(args.archive)
    with open(out_file, 'wb') as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        if out_file.endswith('.tar.gz'):
            files = dict(res)
            create_tar_gz_from_files(fh, files, compresslevel=5)
            file_count = len(files)
        elif out_file.endswith('.zip'):
            with JarWriter(fileobj=fh, optimize=False, compress_level=5) as writer:
                for p, f in res:
                    writer.add(p.encode('utf-8'), f.read(), mode=f.mode,
                               skip_duplicates=True)
                    file_count += 1
        else:
            raise Exception('unhandled file extension: %s' % out_file)

    duration = time.time() - t_start
    zip_size = os.path.getsize(args.outputfile)
    basename = os.path.basename(args.outputfile)
    print('Wrote %d files in %d bytes to %s in %.2fs' % (
        file_count, zip_size, basename, duration))
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact', {'filename': filename},
             'Installing from {filename}')

    # Copy all .so files to dist/bin, avoiding modification where possible.
    ensureParentDir(os.path.join(distdir, 'bin', '.dummy'))

    with zipfile.ZipFile(filename) as zf:
        for info in zf.infolist():
            if not info.filename.endswith('.so'):
                continue
            n = os.path.join(distdir, 'bin', os.path.basename(info.filename))
            fh = FileAvoidWrite(n, mode='r')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(
                logging.INFO, 'artifact',
                {
                    'updating': 'Updating' if file_updated else 'Not updating',
                    'filename': n
                },
                '{updating} {filename}')
    return 0
def test_faster_recursive_make(self):
    substs = list(BASE_SUBSTS) + [
        ('BUILD_BACKENDS', 'FasterMake+RecursiveMake'),
    ]
    with self.do_test_backend(get_backend_class(
            'FasterMake+RecursiveMake'), substs=substs) as config:
        buildid = mozpath.join(config.topobjdir, 'config', 'buildid')
        ensureParentDir(buildid)
        with open(buildid, 'w') as fh:
            fh.write('20100101012345\n')

        build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
        overrides = [
            'install_manifest_depends=',
            'MOZ_JAR_MAKER_FILE_FORMAT=flat',
            'TEST_MOZBUILD=1',
        ]
        with self.line_handler() as handle_make_line:
            build._run_make(directory=config.topobjdir, target=overrides,
                            silent=False, line_handler=handle_make_line)

        self.validate(config)
def add(self, path):
    with self._client() as c:
        ensureParentDir(self.tmppath(path))
        with open(self.tmppath(path), 'wb') as fh:
            fh.write(path)
        c.add(self.tmppath(path))
def add(self, path):
    c = hglib.open(self.tmpdir)
    ensureParentDir(self.tmppath(path))
    with open(self.tmppath(path), 'wb') as fh:
        fh.write(path)
    c.add(self.tmppath(path))
def add(self, path):
    ensureParentDir(self.tmppath(path))
    open(self.tmppath(path), 'wb').write(path)
def create_one(self, which, path, content):
    file = self.tmppath(os.path.join(which, path))
    ensureParentDir(file)
    open(file, 'wb').write(content)
def dump_cache(self):
    ensureParentDir(self._cache_filename)
    pickle.dump(list(reversed(list(self._cache.items()))),
                open(self._cache_filename, 'wb'), -1)
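# A matching loader is not shown in this collection; a hypothetical
# counterpart, assuming the pickle holds the newest-first list of
# (key, value) pairs that dump_cache writes:
import os
import pickle

def load_cache(cache_filename):
    if not os.path.exists(cache_filename):
        return {}
    with open(cache_filename, 'rb') as fh:
        # Re-reverse to restore the cache's original insertion order.
        return dict(reversed(pickle.load(fh)))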