def webidl_test(self, command_context, **kwargs): sys.path.insert( 0, os.path.join(command_context.topsrcdir, "other-licenses", "ply")) # Ensure the topobjdir exists. On a Taskcluster test run there won't be # an objdir yet. mkdir(command_context.topobjdir) # Make sure we drop our cached grammar bits in the objdir, not # wherever we happen to be running from. os.chdir(command_context.topobjdir) if kwargs["verbose"] is None: kwargs["verbose"] = False # Now we're going to create the cached grammar file in the # objdir. But we're going to try loading it as a python # module, so we need to make sure the objdir is in our search # path. sys.path.insert(0, command_context.topobjdir) import runtests return runtests.run_tests(kwargs["tests"], verbose=kwargs["verbose"])
def _make_list_file(self, kind, objdir, objs, name):
    """Write a linker response/list file for *objs* under *objdir*.

    :param kind: 'target' picks the configured EXPAND_LIBS_LIST_STYLE;
        anything else (host) always uses the plain 'list' style.
    :param objdir: directory the list file is written into; paths in the
        file are made relative to it.
    :param objs: object file paths to record.
    :param name: file name for the list file.
    :returns: the linker argument referencing the list file, or None when
        there are no objects or the style is unknown.
    """
    if not objs:
        return None

    if kind == 'target':
        style = self.environment.substs.get('EXPAND_LIBS_LIST_STYLE')
    else:
        # The host compiler may be a different kind than the target
        # compiler, so EXPAND_LIBS_LIST_STYLE may not apply to it. Every
        # compiler understands the plain `list` style, so fall back to
        # that. In practice this is fine: the only place another style
        # really matters is when linking huge numbers of objects (command
        # line length limits), which only happens for libxul.
        style = 'list'

    path = mozpath.join(objdir, name)
    relative = [os.path.relpath(o, objdir) for o in objs]

    if style == 'linkerscript':
        body = '\n'.join('INPUT("%s")' % o for o in relative)
        ref = path
    elif style == 'filelist':
        body = '\n'.join(relative)
        ref = "-Wl,-filelist," + path
    elif style == 'list':
        body = '\n'.join(relative)
        ref = "@" + path
    else:
        return None

    mkdir(objdir)
    with self._write_file(path) as fh:
        fh.write(body)
    return ref
def __init__(self, cache_dir, log=None, skip_cache=False):
    """Set up the artifact cache rooted at *cache_dir*.

    :param cache_dir: directory that will hold downloaded artifacts
        (created here if missing).
    :param log: optional logging callable, also handed to the persist
        limit policy.
    :param skip_cache: flag recorded for callers that want to bypass
        cached entries.
    """
    # Create the cache directory first; not_indexed asks the helper to
    # keep it out of platform file indexing.
    mkdir(cache_dir, not_indexed=True)
    self._cache_dir = cache_dir
    self._skip_cache = skip_cache
    self._log = log
    limit = ArtifactPersistLimit(log)
    self._persist_limit = limit
    self._download_manager = DownloadManager(cache_dir, persist_limit=limit)
    # Sentinel meaning no download progress has been reported yet.
    self._last_dl_update = -1
def __init__(self, cache_dir, log=None, skip_cache=False):
    """Initialize an artifact cache rooted at *cache_dir*.

    :param cache_dir: directory for downloaded artifacts; created here.
    :param log: optional logging callable, shared with the persist limit.
    :param skip_cache: recorded flag; presumably makes callers bypass
        cached entries — confirm against usage elsewhere.
    """
    # not_indexed hints the directory should be excluded from platform
    # file indexing (behavior lives in the mkdir helper).
    mkdir(cache_dir, not_indexed=True)
    self._cache_dir = cache_dir
    self._log = log
    self._skip_cache = skip_cache
    self._persist_limit = ArtifactPersistLimit(log)
    self._download_manager = DownloadManager(
        self._cache_dir, persist_limit=self._persist_limit)
    # -1 acts as a "never updated" sentinel for download progress.
    self._last_dl_update = -1
def _make_list_file(self, objdir, objs, name):
    """Emit a linker list file named *name* in *objdir* for *objs*.

    The file format follows the configured EXPAND_LIBS_LIST_STYLE.
    Returns the linker argument that references the file, or None if
    there is nothing to write or the style is unrecognized.
    """
    if not objs:
        return None

    style = self.environment.substs.get('EXPAND_LIBS_LIST_STYLE')
    target = mozpath.join(objdir, name)
    # Record object paths relative to the directory holding the file.
    entries = [os.path.relpath(o, objdir) for o in objs]
    plain = '\n'.join(entries)

    if style == 'linkerscript':
        ref, content = target, '\n'.join('INPUT("%s")' % e for e in entries)
    elif style == 'filelist':
        ref, content = "-Wl,-filelist," + target, plain
    elif style == 'list':
        ref, content = "@" + target, plain
    else:
        return None

    mkdir(objdir)
    with self._write_file(target) as fh:
        fh.write(content)
    return ref
def webidl_test(self, **kwargs): sys.path.insert(0, os.path.join(self.topsrcdir, 'other-licenses', 'ply')) # Ensure the topobjdir exists. On a Taskcluster test run there won't be # an objdir yet. mkdir(self.topobjdir) # Make sure we drop our cached grammar bits in the objdir, not # wherever we happen to be running from. os.chdir(self.topobjdir) if kwargs["verbose"] is None: kwargs["verbose"] = False # Now we're going to create the cached grammar file in the # objdir. But we're going to try loading it as a python # module, so we need to make sure the objdir is in our search # path. sys.path.insert(0, self.topobjdir) import runtests return runtests.run_tests(kwargs["tests"], verbose=kwargs["verbose"])
def write_mozbuild(
    config,
    srcdir,
    output,
    non_unified_sources,
    gn_config_files,
    mozilla_flags,
    write_mozbuild_variables,
):
    """Generate moz.build files from a set of GN configuration dumps.

    For each GN config JSON file, process it into per-directory build
    attributes, then write one moz.build per source directory (factoring
    attributes common across configurations behind conditions) and a root
    moz.build listing the DIRS common to each configuration subset.

    :param config: build configuration object providing ``topsrcdir``.
    :param srcdir: root source directory the root moz.build is written to.
    :param output: output directory forwarded to ``process_gn_config``.
    :param non_unified_sources: sources excluded from unified compilation.
    :param gn_config_files: paths of GN config JSON dumps to process.
    :param mozilla_flags: Mozilla-specific flags forwarded to processing.
    :param write_mozbuild_variables: extra per-directory knobs; only the
        "INCLUDE_TK_CFLAGS_DIRS" key is consulted here.
    """
    all_mozbuild_results = []

    # Sorted for deterministic processing order of the config dumps.
    for path in sorted(gn_config_files):
        with open(path, "r") as fh:
            gn_config = json.load(fh)
        mozbuild_attrs = process_gn_config(
            gn_config,
            srcdir,
            config,
            output,
            non_unified_sources,
            gn_config["sandbox_vars"],
            mozilla_flags,
        )
        all_mozbuild_results.append(mozbuild_attrs)

    # Translate {config -> {dirs -> build info}} into
    # {dirs -> [(config, build_info)]}
    configs_by_dir = defaultdict(list)
    for config_attrs in all_mozbuild_results:
        mozbuild_args = config_attrs["mozbuild_args"]
        dirs = config_attrs["dirs"]
        for d, build_data in dirs.items():
            configs_by_dir[d].append((mozbuild_args, build_data))

    for relsrcdir, configs in sorted(configs_by_dir.items()):
        target_srcdir = mozpath.join(config.topsrcdir, relsrcdir)
        mkdir(target_srcdir)
        target_mozbuild = mozpath.join(target_srcdir, "moz.build")
        with open(target_mozbuild, "w") as fh:
            mb = MozbuildWriter(fh)
            mb.write(license_header)
            mb.write("\n")
            mb.write(generated_header)

            # Some directories need TK_CFLAGS on gtk builds; the key may be
            # absent entirely, in which case we skip this silently.
            try:
                if relsrcdir in write_mozbuild_variables[
                        "INCLUDE_TK_CFLAGS_DIRS"]:
                    mb.write('if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk":\n')
                    mb.write(' CXXFLAGS += CONFIG["TK_CFLAGS"]\n')
            except KeyError:
                pass

            all_args = [args for args, _ in configs]

            # Start with attributes that will be a part of the mozconfig
            # for every configuration, then factor by other potentially useful
            # combinations.
            for attrs in (
                (),
                ("MOZ_DEBUG", ),
                ("OS_TARGET", ),
                ("CPU_ARCH", ),
                ("MOZ_DEBUG", "OS_TARGET"),
                ("OS_TARGET", "CPU_ARCH"),
                ("OS_TARGET", "CPU_ARCH", "MOZ_DEBUG"),
                ("MOZ_DEBUG", "OS_TARGET", "CPU_ARCH", "HOST_CPU_ARCH"),
            ):
                conditions = set()
                for args in all_args:
                    # Missing/falsy attr values are normalized to "" so that
                    # conditions sort and compare consistently below.
                    cond = tuple(((k, args.get(k) or "") for k in attrs))
                    conditions.add(cond)

                for cond in sorted(conditions):
                    common_attrs = find_common_attrs([
                        attrs for args, attrs in configs
                        if all((args.get(k) or "") == v for k, v in cond)
                    ])
                    if any(common_attrs.values()):
                        if cond:
                            mb.write_condition(dict(cond))
                        mb.write_attrs(common_attrs)
                        if cond:
                            mb.terminate_condition()

            mb.finalize()

    dirs_mozbuild = mozpath.join(srcdir, "moz.build")
    with open(dirs_mozbuild, "w") as fh:
        mb = MozbuildWriter(fh)
        mb.write(license_header)
        mb.write("\n")
        mb.write(generated_header)

        # Not every srcdir is present for every config, which needs to be
        # reflected in the generated root moz.build.
        dirs_by_config = {
            tuple(v["mozbuild_args"].items()): set(v["dirs"].keys())
            for v in all_mozbuild_results
        }

        for attrs in ((), ("OS_TARGET", ), ("OS_TARGET", "CPU_ARCH")):
            conditions = set()
            for args in dirs_by_config.keys():
                cond = tuple(((k, dict(args).get(k) or "") for k in attrs))
                conditions.add(cond)

            for cond in sorted(conditions):
                common_dirs = None
                for args, dir_set in dirs_by_config.items():
                    if all((dict(args).get(k) or "") == v for k, v in cond):
                        if common_dirs is None:
                            common_dirs = deepcopy(dir_set)
                        else:
                            common_dirs &= dir_set

                for args, dir_set in dirs_by_config.items():
                    # BUGFIX: cond values were built with the `or ""`
                    # normalization above, so the match here must use the
                    # same normalization. The previous plain `.get(k) == v`
                    # never matched configs whose attr was missing (cond
                    # value ""), so their already-emitted common dirs were
                    # not subtracted and ended up duplicated in later,
                    # more specific DIRS lists.
                    if all((dict(args).get(k) or "") == v for k, v in cond):
                        dir_set -= common_dirs
                if common_dirs:
                    if cond:
                        mb.write_condition(dict(cond))
                    mb.write_mozbuild_list("DIRS",
                                           ["/%s" % d for d in common_dirs])
                    if cond:
                        mb.terminate_condition()
def write_mozbuild(config, srcdir, output, non_unified_sources,
                   gn_config_files, mozilla_flags):
    """Generate moz.build files from a set of GN configuration dumps.

    Processes every GN config JSON into per-directory build attributes,
    writes one moz.build per source directory (factoring attributes shared
    across configurations behind conditions), then writes a root moz.build
    in *srcdir* listing the DIRS present for each configuration subset.

    :param config: build configuration object providing ``topsrcdir``.
    :param srcdir: root source directory for the generated root moz.build.
    :param output: output directory forwarded to ``process_gn_config``.
    :param non_unified_sources: sources excluded from unified compilation.
    :param gn_config_files: paths of GN config JSON dumps to process.
    :param mozilla_flags: Mozilla-specific flags forwarded to processing.
    """
    all_mozbuild_results = []
    # NOTE(review): iteration order follows gn_config_files as given;
    # output may vary with input ordering.
    for path in gn_config_files:
        with open(path, 'r') as fh:
            gn_config = json.load(fh)
        mozbuild_attrs = process_gn_config(gn_config, srcdir, config, output,
                                           non_unified_sources,
                                           gn_config['sandbox_vars'],
                                           mozilla_flags)
        all_mozbuild_results.append(mozbuild_attrs)

    # Translate {config -> {dirs -> build info}} into
    # {dirs -> [(config, build_info)]}
    configs_by_dir = defaultdict(list)
    for config_attrs in all_mozbuild_results:
        mozbuild_args = config_attrs['mozbuild_args']
        dirs = config_attrs['dirs']
        for d, build_data in dirs.items():
            configs_by_dir[d].append((mozbuild_args, build_data))

    # NOTE(review): dict iteration order here is unsorted, so generated
    # files may be visited in a non-deterministic order.
    for relsrcdir, configs in configs_by_dir.items():
        target_srcdir = mozpath.join(config.topsrcdir, relsrcdir)
        mkdir(target_srcdir)
        target_mozbuild = mozpath.join(target_srcdir, 'moz.build')
        with open(target_mozbuild, 'w') as fh:
            mb = MozbuildWriter(fh)
            mb.write(license_header)
            mb.write('\n')
            mb.write(generated_header)

            # NOTE(review): all_attr_sets is never read below; it looks
            # like a leftover and is a candidate for removal.
            all_attr_sets = [attrs for _, attrs in configs]
            all_args = [args for args, _ in configs]
            # Start with attributes that will be a part of the mozconfig
            # for every configuration, then factor by other potentially useful
            # combinations.
            for attrs in ((), ('MOZ_DEBUG', ), ('OS_TARGET', ), (
                    'MOZ_DEBUG',
                    'OS_TARGET',
            ), ('MOZ_DEBUG', 'OS_TARGET', 'CPU_ARCH', 'HOST_CPU_ARCH')):
                # One condition per distinct value-tuple of these attrs
                # across all configurations.
                conditions = set()
                for args in all_args:
                    cond = tuple(((k, args.get(k)) for k in attrs))
                    conditions.add(cond)
                for cond in sorted(conditions):
                    # Factor out attributes common to every configuration
                    # matching this condition.
                    common_attrs = find_common_attrs([
                        attrs for args, attrs in configs
                        if all(args.get(k) == v for k, v in cond)
                    ])
                    if any(common_attrs.values()):
                        # The empty condition () writes unconditionally.
                        if cond:
                            mb.write_condition(dict(cond))
                        mb.write_attrs(common_attrs)
                        if cond:
                            mb.terminate_condition()

            mb.finalize()

    dirs_mozbuild = mozpath.join(srcdir, 'moz.build')
    with open(dirs_mozbuild, 'w') as fh:
        mb = MozbuildWriter(fh)
        mb.write(license_header)
        mb.write('\n')
        mb.write(generated_header)

        # Not every srcdir is present for every config, which needs to be
        # reflected in the generated root moz.build.
        dirs_by_config = {
            tuple(v['mozbuild_args'].items()): set(v['dirs'].keys())
            for v in all_mozbuild_results
        }

        for attrs in ((), ('OS_TARGET', ), ('OS_TARGET', 'CPU_ARCH')):
            conditions = set()
            for args in dirs_by_config.keys():
                cond = tuple(((k, dict(args).get(k)) for k in attrs))
                conditions.add(cond)
            # NOTE(review): unlike the attrs loop above, this iterates the
            # set unsorted, so DIRS emission order is non-deterministic.
            for cond in conditions:
                # Intersect the dir sets of all configurations matching
                # this condition.
                common_dirs = None
                for args, dir_set in dirs_by_config.items():
                    if all(dict(args).get(k) == v for k, v in cond):
                        if common_dirs is None:
                            common_dirs = deepcopy(dir_set)
                        else:
                            common_dirs &= dir_set
                # Remove already-emitted dirs from the matching configs so
                # later (more specific) conditions don't repeat them.
                for args, dir_set in dirs_by_config.items():
                    if all(dict(args).get(k) == v for k, v in cond):
                        dir_set -= common_dirs
                if common_dirs:
                    if cond:
                        mb.write_condition(dict(cond))
                    mb.write_mozbuild_list('DIRS',
                                           ['/%s' % d for d in common_dirs])
                    if cond:
                        mb.terminate_condition()
def state_path(self):
    """Return the directory where unpacked artifacts are kept,
    creating it if necessary."""
    # Convention: $MOZBUILD_STATE_PATH/$FEATURE.
    path = mozpath.join(self._mach_context.state_dir, "browsertime")
    mkdir(path)
    return path
def setup_prerequisites(self):
    r"""Install browsertime and visualmetrics.py prerequisites.

    Checks for ImageMagick on local Linux runs, then downloads (via the
    artifact cache) and unpacks the host-specific tool fetches into
    ``self.state_path``, fixing up executable bits afterwards.

    Side effects: creates directories, temporarily changes the process
    working directory (restored in ``finally``), and chmods extracted
    files.

    :returns: 1 on a missing-ImageMagick error; otherwise falls through
        (implicitly None) on success.
    """
    from mozbuild.action.tooltool import unpack_file
    from mozbuild.artifact_cache import ArtifactCache
    if not AUTOMATION and host_platform().startswith("linux"):
        # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
        # do that (yet). Provide some guidance.
        try:
            from shutil import which
        except ImportError:
            # Fallback for Pythons without shutil.which.
            from shutil_which import which
        im_programs = ("compare", "convert", "mogrify")
        for im_program in im_programs:
            prog = which(im_program)
            if not prog:
                print("Error: On Linux, ImageMagick must be on the PATH. "
                      "Install ImageMagick manually and try again (or update PATH). "
                      "On Ubuntu and Debian, try `sudo apt-get install imagemagick`. "
                      "On Fedora, try `sudo dnf install imagemagick`. "
                      "On CentOS, try `sudo yum install imagemagick`.")
                return 1

    # Download the visualmetrics.py requirements.
    artifact_cache = ArtifactCache(self.artifact_cache_path,
                                   log=self.log,
                                   skip_cache=False)

    fetches = host_fetches[host_platform()]
    for tool, fetch in sorted(fetches.items()):
        archive = artifact_cache.fetch(fetch["url"])
        # TODO: assert type, verify sha256 (and size?).
        if fetch.get("unpack", True):
            cwd = os.getcwd()
            try:
                mkdir(self.state_path)
                os.chdir(self.state_path)
                self.log(
                    logging.INFO,
                    "browsertime",
                    {"path": archive},
                    "Unpacking temporary location {path}",
                )

                if "win64" in host_platform(
                ) and "imagemagick" in tool.lower():
                    # Windows archive does not contain a subfolder
                    # so we make one for it here
                    mkdir(fetch.get("path"))
                    os.chdir(os.path.join(self.state_path, fetch.get("path")))
                    unpack_file(archive)
                    os.chdir(self.state_path)
                else:
                    unpack_file(archive)

                # Make sure the expected path exists after extraction
                path = os.path.join(self.state_path, fetch.get("path"))
                if not os.path.exists(path):
                    raise Exception("Cannot find an extracted directory: %s" %
                                    path)

                try:
                    # Some archives provide binaries that don't have the
                    # executable bit set so we need to set it here
                    for root, dirs, files in os.walk(path):
                        for edir in dirs:
                            loc_to_change = os.path.join(root, edir)
                            st = os.stat(loc_to_change)
                            os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                        for efile in files:
                            loc_to_change = os.path.join(root, efile)
                            st = os.stat(loc_to_change)
                            os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                except Exception as e:
                    raise Exception(
                        "Could not set executable bit in %s, error: %s" %
                        (path, str(e)))
            finally:
                # Always restore the original working directory.
                os.chdir(cwd)
def setup(self, should_clobber=False):
    r'''Install browsertime and visualmetrics.py requirements.

    Downloads host-specific tool fetches into ``self.state_path``, fixes
    up executable bits, installs the browsertime node module, and (off
    automation) runs a final check.

    :param should_clobber: forwarded to the node package setup helper.
    :returns: 1 on missing prerequisites, a non-zero package-setup status
        on failure, 0 on automation success, else ``self.check()``.
    '''
    # Whether we run under CI automation (MOZ_AUTOMATION env var).
    automation = bool(os.environ.get('MOZ_AUTOMATION'))

    from mozbuild.action.tooltool import unpack_file
    from mozbuild.artifact_cache import ArtifactCache

    # Make the eslint setup_helper importable.
    sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
    import setup_helper

    if not os.environ.get('MOZ_AUTOMATION') and host_platform().startswith(
            'linux'):
        # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
        # do that (yet). Provide some guidance.
        try:
            from shutil import which
        except ImportError:
            # Fallback for Pythons without shutil.which.
            from shutil_which import which
        im_programs = ('compare', 'convert', 'mogrify')
        for im_program in im_programs:
            prog = which(im_program)
            if not prog:
                print('Error: On Linux, ImageMagick must be on the PATH. '
                      'Install ImageMagick manually and try again (or update PATH). '
                      'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                      'On Fedora, try `sudo dnf install imagemagick`. '
                      'On CentOS, try `sudo yum install imagemagick`.')
                return 1

    # Download the visualmetrics.py requirements.
    artifact_cache = ArtifactCache(self.artifact_cache_path,
                                   log=self.log,
                                   skip_cache=False)

    fetches = host_fetches[host_platform()]
    for tool, fetch in sorted(fetches.items()):
        archive = artifact_cache.fetch(fetch['url'])
        # TODO: assert type, verify sha256 (and size?).
        if fetch.get('unpack', True):
            cwd = os.getcwd()
            try:
                mkdir(self.state_path)
                os.chdir(self.state_path)
                self.log(logging.INFO, 'browsertime', {'path': archive},
                         'Unpacking temporary location {path}')
                unpack_file(archive)

                # Make sure the expected path exists after extraction
                path = os.path.join(self.state_path, fetch.get('path'))
                if not os.path.exists(path):
                    raise Exception("Cannot find an extracted directory: %s" %
                                    path)

                try:
                    # Some archives provide binaries that don't have the
                    # executable bit set so we need to set it here
                    for root, dirs, files in os.walk(path):
                        for edir in dirs:
                            loc_to_change = os.path.join(root, edir)
                            st = os.stat(loc_to_change)
                            os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                        for efile in files:
                            loc_to_change = os.path.join(root, efile)
                            st = os.stat(loc_to_change)
                            os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                except Exception as e:
                    raise Exception(
                        "Could not set executable bit in %s, error: %s" %
                        (path, str(e)))
            finally:
                # Always restore the original working directory.
                os.chdir(cwd)

    # Install the browsertime Node.js requirements.
    if not setup_helper.check_node_executables_valid():
        return 1

    if 'GECKODRIVER_BASE_URL' not in os.environ:
        # Use custom `geckodriver` with pre-release Android support.
        url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
        os.environ['GECKODRIVER_BASE_URL'] = url

    self.log(logging.INFO, 'browsertime',
             {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
             'Installing browsertime node module from {package_json}')
    status = setup_helper.package_setup(BROWSERTIME_ROOT,
                                        'browsertime',
                                        should_clobber=should_clobber,
                                        no_optional=automation)
    if status:
        return status
    # On automation, skip the interactive/local check step.
    if automation:
        return 0
    return self.check()
def setup(self, should_clobber=False):
    r'''Install browsertime and visualmetrics.py requirements.

    Older variant: checks for ImageMagick with the `which` module,
    downloads and unpacks host-specific fetches into ``self.state_path``,
    then installs the browsertime node module.

    :param should_clobber: forwarded to the node package setup helper.
    :returns: 1 on missing prerequisites, a non-zero package-setup status
        on failure, else ``self.check()``.
    '''
    from mozbuild.action.tooltool import unpack_file
    from mozbuild.artifact_cache import ArtifactCache

    # Make the eslint setup_helper importable.
    sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
    import setup_helper

    if host_platform().startswith('linux'):
        # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
        # do that (yet). Provide some guidance.
        import which
        im_programs = ('compare', 'convert', 'mogrify')
        try:
            for im_program in im_programs:
                which.which(im_program)
        except which.WhichError as e:
            print('Error: {} On Linux, ImageMagick must be on the PATH. '
                  'Install ImageMagick manually and try again (or update PATH). '
                  'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                  'On Fedora, try `sudo dnf install imagemagick`. '
                  'On CentOS, try `sudo yum install imagemagick`.'.format(e))
            return 1

    # Download the visualmetrics.py requirements.
    artifact_cache = ArtifactCache(self.artifact_cache_path,
                                   log=self.log,
                                   skip_cache=False)

    fetches = host_fetches[host_platform()]
    for tool, fetch in sorted(fetches.items()):
        archive = artifact_cache.fetch(fetch['url'])
        # TODO: assert type, verify sha256 (and size?).
        if fetch.get('unpack', True):
            cwd = os.getcwd()
            try:
                mkdir(self.state_path)
                os.chdir(self.state_path)
                self.log(logging.INFO, 'browsertime', {'path': archive},
                         'Unpacking temporary location {path}')
                unpack_file(archive)
            finally:
                # Always restore the original working directory.
                os.chdir(cwd)

    # Install the browsertime Node.js requirements.
    if not setup_helper.check_node_executables_valid():
        return 1

    if 'GECKODRIVER_BASE_URL' not in os.environ:
        # Use custom `geckodriver` with pre-release Android support.
        url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
        os.environ['GECKODRIVER_BASE_URL'] = url

    self.log(logging.INFO, 'browsertime',
             {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
             'Installing browsertime node module from {package_json}')
    status = setup_helper.package_setup(BROWSERTIME_ROOT,
                                        'browsertime',
                                        should_clobber=should_clobber)
    if status:
        return status

    return self.check()
def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None,
             log=None, skip_cache=False):
    """Create an LRU cache persisted as a pickle under *cache_dir*.

    :param cache_dir: directory holding the pickle file (created last).
    :param cache_name: basename prefix of the pickle file.
    :param cache_size: maximum number of entries kept in the LRU cache.
    :param cache_callback: optional eviction callback for pylru.
    :param log: optional logging callable.
    :param skip_cache: recorded flag for callers that bypass the cache.
    """
    self._skip_cache = skip_cache
    self._log = log
    self._cache = pylru.lrucache(cache_size, callback=cache_callback)
    # On-disk location of the pickled cache contents.
    self._cache_filename = mozpath.join(cache_dir,
                                        cache_name + '-cache.pickle')
    # not_indexed keeps the directory out of platform file indexing.
    mkdir(cache_dir, not_indexed=True)
def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None,
             log=None, skip_cache=False):
    """Initialize a pylru-backed cache persisted as a pickle file.

    :param cache_dir: directory holding the pickle file; created here.
    :param cache_name: basename prefix for the '-cache.pickle' file.
    :param cache_size: maximum number of LRU entries.
    :param cache_callback: optional eviction callback passed to pylru.
    :param log: optional logging callable.
    :param skip_cache: recorded flag; presumably makes callers bypass
        cached entries — confirm against usage elsewhere.
    """
    self._skip_cache = skip_cache
    self._cache = pylru.lrucache(cache_size, callback=cache_callback)
    # On-disk location of the pickled cache contents.
    self._cache_filename = mozpath.join(cache_dir,
                                        cache_name + '-cache.pickle')
    self._log = log
    # not_indexed hints the directory should be excluded from platform
    # file indexing (behavior lives in the mkdir helper).
    mkdir(cache_dir, not_indexed=True)
def write_mozbuild(config, srcdir, output, non_unified_sources,
                   gn_config_files, mozilla_flags):
    """Generate moz.build files from a set of GN configuration dumps.

    Processes every GN config JSON into per-directory build attributes,
    writes one moz.build per source directory (factoring attributes shared
    across configurations behind conditions), then writes a root moz.build
    in *srcdir* listing the DIRS present for each configuration subset.

    Improvements over the previous revision: the unused ``all_attr_sets``
    local is removed, and the per-directory and root-DIRS iterations are
    sorted so the generated files are deterministic regardless of dict
    ordering (matching the sorted attrs loop already present here).

    :param config: build configuration object providing ``topsrcdir``.
    :param srcdir: root source directory for the generated root moz.build.
    :param output: output directory forwarded to ``process_gn_config``.
    :param non_unified_sources: sources excluded from unified compilation.
    :param gn_config_files: paths of GN config JSON dumps to process.
    :param mozilla_flags: Mozilla-specific flags forwarded to processing.
    """
    all_mozbuild_results = []

    for path in gn_config_files:
        with open(path, 'r') as fh:
            gn_config = json.load(fh)
        mozbuild_attrs = process_gn_config(gn_config, srcdir, config, output,
                                           non_unified_sources,
                                           gn_config['sandbox_vars'],
                                           mozilla_flags)
        all_mozbuild_results.append(mozbuild_attrs)

    # Translate {config -> {dirs -> build info}} into
    # {dirs -> [(config, build_info)]}
    configs_by_dir = defaultdict(list)
    for config_attrs in all_mozbuild_results:
        mozbuild_args = config_attrs['mozbuild_args']
        dirs = config_attrs['dirs']
        for d, build_data in dirs.items():
            configs_by_dir[d].append((mozbuild_args, build_data))

    # Sorted for deterministic generation order.
    for relsrcdir, configs in sorted(configs_by_dir.items()):
        target_srcdir = mozpath.join(config.topsrcdir, relsrcdir)
        mkdir(target_srcdir)
        target_mozbuild = mozpath.join(target_srcdir, 'moz.build')
        with open(target_mozbuild, 'w') as fh:
            mb = MozbuildWriter(fh)
            mb.write(license_header)
            mb.write('\n')
            mb.write(generated_header)

            all_args = [args for args, _ in configs]
            # Start with attributes that will be a part of the mozconfig
            # for every configuration, then factor by other potentially useful
            # combinations.
            for attrs in ((), ('MOZ_DEBUG',), ('OS_TARGET',),
                          ('MOZ_DEBUG', 'OS_TARGET',),
                          ('MOZ_DEBUG', 'OS_TARGET', 'CPU_ARCH',
                           'HOST_CPU_ARCH')):
                # One condition per distinct value-tuple of these attrs.
                conditions = set()
                for args in all_args:
                    cond = tuple(((k, args.get(k)) for k in attrs))
                    conditions.add(cond)
                for cond in sorted(conditions):
                    # Factor out attributes common to every configuration
                    # matching this condition.
                    common_attrs = find_common_attrs([
                        attrs for args, attrs in configs
                        if all(args.get(k) == v for k, v in cond)
                    ])
                    if any(common_attrs.values()):
                        # The empty condition () writes unconditionally.
                        if cond:
                            mb.write_condition(dict(cond))
                        mb.write_attrs(common_attrs)
                        if cond:
                            mb.terminate_condition()

            mb.finalize()

    dirs_mozbuild = mozpath.join(srcdir, 'moz.build')
    with open(dirs_mozbuild, 'w') as fh:
        mb = MozbuildWriter(fh)
        mb.write(license_header)
        mb.write('\n')
        mb.write(generated_header)

        # Not every srcdir is present for every config, which needs to be
        # reflected in the generated root moz.build.
        dirs_by_config = {tuple(v['mozbuild_args'].items()):
                          set(v['dirs'].keys())
                          for v in all_mozbuild_results}

        for attrs in ((), ('OS_TARGET',), ('OS_TARGET', 'CPU_ARCH')):
            conditions = set()
            for args in dirs_by_config.keys():
                cond = tuple(((k, dict(args).get(k)) for k in attrs))
                conditions.add(cond)
            # Sorted (like the attrs loop above) so DIRS emission order is
            # deterministic. Within one attrs level each configuration
            # matches exactly one condition, so ordering does not change
            # which dirs are emitted, only the order they appear in.
            for cond in sorted(conditions):
                # Intersect the dir sets of all configurations matching
                # this condition.
                common_dirs = None
                for args, dir_set in dirs_by_config.items():
                    if all(dict(args).get(k) == v for k, v in cond):
                        if common_dirs is None:
                            common_dirs = deepcopy(dir_set)
                        else:
                            common_dirs &= dir_set
                # Remove already-emitted dirs from the matching configs so
                # later (more specific) conditions don't repeat them.
                for args, dir_set in dirs_by_config.items():
                    if all(dict(args).get(k) == v for k, v in cond):
                        dir_set -= common_dirs
                if common_dirs:
                    if cond:
                        mb.write_condition(dict(cond))
                    mb.write_mozbuild_list('DIRS',
                                           ['/%s' % d for d in common_dirs])
                    if cond:
                        mb.terminate_condition()