def post_dispatch_handler(context, handler, args):
    """Perform global operations after command dispatch.

    For now, we will use this to handle build system telemetry: for an
    opted-in, randomly sampled subset of invocations, spawn a detached
    submission process so the current command is not blocked.
    """
    # Don't do anything when the dispatch should be skipped entirely
    # (maintenance commands, automation, non-interactive stdin, ...).
    if should_skip_dispatch(context, handler):
        return

    # We call mach environment in client.mk which would cause the
    # data submission below to block the forward progress of make.
    # FIX: ('environment') without a trailing comma is just a string, so the
    # old `in` test was a substring check; use a real one-element tuple.
    if handler.name in ('environment',):
        return

    # We have not opted-in to telemetry
    if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
        return

    # Only submit on roughly every n-th operation to limit overhead.
    if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
        return

    # Fire-and-forget submission; silence the child's output so it cannot
    # interleave with the current command's console output.
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen([sys.executable,
                          os.path.join(topsrcdir, 'build',
                                       'submit_telemetry_data.py'),
                          get_state_dir()[0]],
                         stdout=devnull, stderr=devnull)
def populate_context(context, key=None):
    """Provide lazily-computed context attributes for mach.

    A ``key`` of None is a no-op (mach probes the handler at startup);
    unknown keys raise :class:`AttributeError` so mach can tell "not
    provided here" apart from "provided but None".
    """
    if key is None:
        return

    if key == 'state_dir':
        state_dir, is_environ = get_state_dir()
        if not os.path.exists(state_dir):
            if is_environ:
                # Directory was chosen via environment variable; create it
                # without any interactive ceremony.
                print('Creating global state directory from environment variable: %s' % state_dir)
            else:
                # First run with the default location: pause for the user to
                # acknowledge, unless running under automation.
                if not os.environ.get('MOZ_AUTOMATION'):
                    print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                    try:
                        sys.stdin.readline()
                    except KeyboardInterrupt:
                        sys.exit(1)
                print('\nCreating default state directory: %s' % state_dir)
            os.makedirs(state_dir, mode=0o770)
        return state_dir

    if key == 'topdir':
        return topsrcdir
    if key == 'telemetry_handler':
        return telemetry_handler
    if key == 'post_dispatch_handler':
        return post_dispatch_handler
    raise AttributeError(key)
def fzf_bootstrap(update=False):
    """Bootstrap fzf if necessary and return path to the executable.

    The bootstrap works by cloning the fzf repository and running the included
    `install` script. If update is True, we will pull the repository and re-run
    the install script.

    Returns None if the user declines installation; exits the process on
    unrecoverable errors (bad clone location, git missing, clone/pull failed).
    """
    # Prefer an fzf already on PATH unless an update was explicitly requested.
    fzf_bin = find_executable('fzf')
    if fzf_bin and not update:
        return fzf_bin

    # Our managed clone lives in the global state directory.
    fzf_path = os.path.join(get_state_dir()[0], 'fzf')

    # We only know how to update clones we made ourselves.
    if update and not os.path.isdir(fzf_path):
        print("fzf installed somewhere other than {}, please update manually".format(fzf_path))
        sys.exit(1)

    def get_fzf():
        # The install script places the binary under <clone>/bin.
        return find_executable('fzf', os.path.join(fzf_path, 'bin'))

    if update:
        ret = run(['git', 'pull'], cwd=fzf_path)
        if ret:
            print("Update fzf failed.")
            sys.exit(1)

        run_fzf_install_script(fzf_path)
        return get_fzf()

    if os.path.isdir(fzf_path):
        fzf_bin = get_fzf()
        if fzf_bin:
            return fzf_bin

        # Fzf is cloned, but binary doesn't exist. Try running the install script
        # (recursion takes the `update` branch above exactly once).
        return fzf_bootstrap(update=True)

    # No clone yet: ask before installing (raw_input — this is Python 2 code).
    install = raw_input("Could not detect fzf, install it now? [y/n]: ")
    if install.lower() != 'y':
        return

    if not find_executable('git'):
        print("Git not found.")
        print(FZF_INSTALL_FAILED)
        sys.exit(1)

    # Shallow clone is enough; we only need the install script and sources.
    cmd = ['git', 'clone', '--depth', '1', 'https://github.com/junegunn/fzf.git']
    if subprocess.call(cmd, cwd=os.path.dirname(fzf_path)):
        print(FZF_INSTALL_FAILED)
        sys.exit(1)

    run_fzf_install_script(fzf_path)

    print("Installed fzf to {}".format(fzf_path))
    return get_fzf()
def telemetry_handler(context, data):
    """Persist one telemetry payload into the outgoing queue.

    Annotates ``data`` in place with argv and platform metadata, then
    writes it as a JSON file under ``<state_dir>/telemetry/outgoing`` for
    later submission. No-op unless BUILD_SYSTEM_TELEMETRY is set.
    """
    # We have not opted-in to telemetry
    if "BUILD_SYSTEM_TELEMETRY" not in os.environ:
        return

    telemetry_dir = os.path.join(get_state_dir()[0], "telemetry")
    try:
        os.mkdir(telemetry_dir)
    except OSError as e:
        # Already existing is fine; anything else is a real error.
        if e.errno != errno.EEXIST:
            raise
    outgoing_dir = os.path.join(telemetry_dir, "outgoing")
    try:
        os.mkdir(outgoing_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    # Add common metadata to help submit sorted data later on.
    data["argv"] = sys.argv
    data.setdefault("system", {}).update(
        dict(
            architecture=list(platform.architecture()),
            machine=platform.machine(),
            python_version=platform.python_version(),
            release=platform.release(),
            system=platform.system(),
            version=platform.version(),
        )
    )

    if platform.system() == "Linux":
        dist = list(platform.linux_distribution())
        data["system"]["linux_distribution"] = dist
    elif platform.system() == "Windows":
        # FIX: the old code wrapped the value in a one-element tuple
        # (trailing comma), storing ([...],) instead of [...].
        data["system"]["win32_ver"] = list(platform.win32_ver())
    elif platform.system() == "Darwin":
        # mac version is a special Cupertino snowflake
        r, v, m = platform.mac_ver()
        data["system"]["mac_ver"] = [r, list(v), m]

    # Unique file name so concurrent invocations never clobber each other.
    with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + ".json"), "w") as f:
        json.dump(data, f, sort_keys=True)
def telemetry_handler(context, data):
    """Persist one telemetry payload into the outgoing queue.

    Annotates ``data`` in place with argv and platform metadata, then
    writes it as a JSON file under ``<state_dir>/telemetry/outgoing`` for
    later submission. No-op unless BUILD_SYSTEM_TELEMETRY is set.
    """
    # We have not opted-in to telemetry
    if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
        return

    telemetry_dir = os.path.join(get_state_dir()[0], 'telemetry')
    try:
        os.mkdir(telemetry_dir)
    except OSError as e:
        # Already existing is fine; anything else is a real error.
        if e.errno != errno.EEXIST:
            raise
    outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
    try:
        os.mkdir(outgoing_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    # Add common metadata to help submit sorted data later on.
    data['argv'] = sys.argv
    data.setdefault('system', {}).update(dict(
        architecture=list(platform.architecture()),
        machine=platform.machine(),
        python_version=platform.python_version(),
        release=platform.release(),
        system=platform.system(),
        version=platform.version(),
    ))

    if platform.system() == 'Linux':
        dist = list(platform.linux_distribution())
        data['system']['linux_distribution'] = dist
    elif platform.system() == 'Windows':
        # FIX: `win32_ver=list(...),` had a stray trailing comma, binding a
        # one-element tuple ([...],) instead of the list itself.
        data['system']['win32_ver'] = list(platform.win32_ver())
    elif platform.system() == 'Darwin':
        # mac version is a special Cupertino snowflake
        r, v, m = platform.mac_ver()
        data['system']['mac_ver'] = [r, list(v), m]

    # Unique file name so concurrent invocations never clobber each other.
    with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'), 'w') as f:
        json.dump(data, f, sort_keys=True)
def post_dispatch_handler(context, handler, instance, result, start_time, end_time, args): """Perform global operations after command dispatch. For now, we will use this to handle build system telemetry. """ # Don't do anything when... if should_skip_dispatch(context, handler): return # We have not opted-in to telemetry if not context.settings.build.telemetry: return from mozbuild.telemetry import gather_telemetry from mozbuild.base import MozbuildObject if not isinstance(instance, MozbuildObject): instance = MozbuildObject.from_environment() try: substs = instance.substs except Exception: substs = {} # We gather telemetry for every operation... gather_telemetry(command=handler.name, success=(result == 0), start_time=start_time, end_time=end_time, mach_context=context, substs=substs, paths=[instance.topsrcdir, instance.topobjdir]) # But only submit about every n-th operation if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1: return with open(os.devnull, 'wb') as devnull: subprocess.Popen([sys.executable, os.path.join(topsrcdir, 'build', 'submit_telemetry_data.py'), get_state_dir()[0]], stdout=devnull, stderr=devnull)
def telemetry_handler(context, data):
    """Write one telemetry payload into the outgoing queue as JSON."""
    # Respect the opt-in: do nothing unless build.telemetry is enabled.
    if not context.settings.build.telemetry:
        return

    telemetry_dir = os.path.join(get_state_dir()[0], 'telemetry')
    outgoing_dir = os.path.join(telemetry_dir, 'outgoing')

    # Create parent then child, tolerating either already existing.
    for directory in (telemetry_dir, outgoing_dir):
        try:
            os.mkdir(directory)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    # A UUID file name keeps concurrent writers from clobbering each other.
    outgoing_path = os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json')
    with open(outgoing_path, 'w') as f:
        json.dump(data, f, sort_keys=True)
def post_dispatch_handler(context, handler, args):
    """Perform global operations after command dispatch.

    Currently this submits build system telemetry for a random sample of
    opted-in invocations, without blocking the finished command.
    """
    # Skip maintenance commands, automation, and machine invocations.
    if should_skip_dispatch(context, handler):
        return

    # Respect the opt-in: do nothing unless build.telemetry is enabled.
    if not context.settings.build.telemetry:
        return

    # Sample roughly one in TELEMETRY_SUBMISSION_FREQUENCY invocations.
    if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
        return

    # Detached submission with all output discarded.
    submitter = os.path.join(topsrcdir, 'build', 'submit_telemetry_data.py')
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen([sys.executable, submitter, get_state_dir()[0]],
                         stdout=devnull, stderr=devnull)
def generate_tasks(params, full, root):
    """Return the list of task labels for the given taskgraph parameters.

    Results are cached on disk under the state directory; ``invalidate``
    decides when the cached file must be regenerated.

    :param params: parameters spec for ``load_parameters_file`` (falsy values
        default to "project=mozilla-central").
    :param full: if True use the ``full_task_set``; otherwise the
        ``target_task_set``.
    :param root: directory that contains the ``taskcluster/ci`` config.
    """
    params = params or "project=mozilla-central"

    # One cache file per task-set attribute, under the global state dir.
    cache_dir = os.path.join(get_state_dir()[0], 'cache', 'taskgraph')
    attr = 'full_task_set' if full else 'target_task_set'
    cache = os.path.join(cache_dir, attr)

    # Drop the cache first if the generation inputs changed.
    invalidate(cache, root)
    if os.path.isfile(cache):
        with open(cache, 'r') as fh:
            return fh.read().splitlines()

    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)

    print("Task configuration changed, generating {}".format(attr.replace('_', ' ')))

    try:
        params = load_parameters_file(params, strict=False)
        params.check()
    except ParameterMismatch as e:
        print(PARAMETER_MISMATCH.format(e.args[0]))
        sys.exit(1)

    # NOTE(review): `fast` presumably skips expensive validation during
    # generation — confirm against the taskgraph module.
    taskgraph.fast = True

    # Generation expects to run from the source root; restore cwd afterwards.
    cwd = os.getcwd()
    os.chdir(build.topsrcdir)

    root = os.path.join(root, 'taskcluster', 'ci')
    tg = getattr(TaskGraphGenerator(root_dir=root, parameters=params), attr)
    labels = [label for label in tg.graph.visit_postorder()]

    os.chdir(cwd)

    with open(cache, 'w') as fh:
        fh.write('\n'.join(labels))
    return labels
def bootstrap(self):
    """Interactively install build prerequisites for the chosen application.

    Resolves the application choice, installs system/app packages, ensures
    modern hg/python/rust, offers stylo and Mercurial setup, and finally
    offers to clone the source if we are not already inside a checkout.
    """
    if self.choice is None:
        # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
        labels = ['%s. %s' % (i + 1, name) for (i, (name, _)) in
                  enumerate(APPLICATIONS_LIST)]
        prompt = APPLICATION_CHOICE % '\n'.join(labels)
        prompt_choice = self.instance.prompt_int(prompt=prompt, low=1,
                                                 high=len(APPLICATIONS))
        # prompt_int is 1-based; the list is 0-based.
        name, application = APPLICATIONS_LIST[prompt_choice-1]
    elif self.choice not in APPLICATIONS.keys():
        raise Exception('Please pick a valid application choice: (%s)' %
                        '/'.join(APPLICATIONS.keys()))
    else:
        name, application = APPLICATIONS[self.choice]

    self.instance.install_system_packages()

    # Like 'install_browser_packages' or 'install_mobile_android_packages'.
    getattr(self.instance, 'install_%s_packages' % application)()

    hg_installed, hg_modern = self.instance.ensure_mercurial_modern()
    self.instance.ensure_python_modern()
    self.instance.ensure_rust_modern()

    # The state directory code is largely duplicated from mach_bootstrap.py.
    # We can't easily import mach_bootstrap.py because the bootstrapper may
    # run in self-contained mode and only the files in this directory will
    # be available. We /could/ refactor parts of mach_bootstrap.py to be
    # part of this directory to avoid the code duplication.
    state_dir, _ = get_state_dir()

    if not os.path.exists(state_dir):
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(
                prompt=STATE_DIR_INFO.format(statedir=state_dir),
                low=1,
                high=2)

            if choice == 1:
                print('Creating global state directory: %s' % state_dir)
                os.makedirs(state_dir, mode=0o770)

    state_dir_available = os.path.exists(state_dir)

    # Install the clang packages needed for developing stylo.
    if not self.instance.no_interactive:
        choice = self.instance.prompt_int(
            prompt=STYLO_DEVELOPMENT_INFO,
            low=1,
            high=2)

        # The best place to install our packages is in the state directory
        # we have. If the user doesn't have one, we need them to re-run
        # bootstrap and create the directory.
        #
        # XXX Android bootstrap just assumes the existence of the state
        # directory and writes the NDK into it. Should we do the same?
        if choice == 1:
            if not state_dir_available:
                print(STYLO_DIRECTORY_MESSAGE.format(statedir=state_dir))
                sys.exit(1)

            self.instance.stylo = True
            self.instance.state_dir = state_dir
            self.instance.ensure_stylo_packages(state_dir)

    checkout_type = current_firefox_checkout(
        check_output=self.instance.check_output,
        hg=self.instance.which('hg'))

    # Possibly configure Mercurial, but not if the current checkout is Git.
    # TODO offer to configure Git.
    if hg_installed and state_dir_available and checkout_type != 'git':
        configure_hg = False
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(prompt=CONFIGURE_MERCURIAL,
                                              low=1,
                                              high=2)
            if choice == 1:
                configure_hg = True
        else:
            configure_hg = self.hg_configure

        if configure_hg:
            configure_mercurial(self.instance.which('hg'), state_dir)

    # Offer to clone if we're not inside a clone.
    have_clone = False

    if checkout_type:
        have_clone = True
    elif hg_installed and not self.instance.no_interactive:
        dest = raw_input(CLONE_MERCURIAL)
        dest = dest.strip()
        if dest:
            dest = os.path.expanduser(dest)
            have_clone = clone_firefox(self.instance.which('hg'), dest)

    if not have_clone:
        print(SOURCE_ADVERTISE)

    print(self.finished % name)
    # A freshly-installed rust toolchain only reaches PATH in a new shell.
    if not (self.instance.which('rustc') and
            self.instance._parse_version('rustc') >= MODERN_RUST_VERSION):
        print("To build %s, please restart the shell (Start a new terminal window)" % name)

    # Like 'suggest_browser_mozconfig' or 'suggest_mobile_android_mozconfig'.
    getattr(self.instance, 'suggest_%s_mozconfig' % application)()
def _finalize_telemetry_legacy(context, instance, handler, success, start_time,
                               end_time, topsrcdir):
    """Record and submit legacy telemetry.

    Parameterized by the raw gathered telemetry, this function handles
    persisting and submission of the data.

    This has been designated as "legacy" telemetry because modern telemetry
    is being submitted with "Glean".

    Returns False when submission was deliberately skipped (maintenance
    command or test mode); otherwise spawns a detached submission process.
    """
    # Imported lazily so telemetry machinery is only loaded when used.
    from mozboot.util import get_state_dir
    from mozbuild.base import MozbuildObject
    from mozbuild.telemetry import gather_telemetry
    from mach.telemetry import (is_telemetry_enabled,
                                is_applicable_telemetry_environment)

    if not (is_applicable_telemetry_environment()
            and is_telemetry_enabled(context.settings)):
        return

    # Fall back to probing the environment when no build-aware instance
    # was handed to us.
    if not isinstance(instance, MozbuildObject):
        instance = MozbuildObject.from_environment()

    command_attrs = getattr(context, 'command_attrs', {})

    # We gather telemetry for every operation.
    data = gather_telemetry(command=handler.name, success=success,
                            start_time=start_time, end_time=end_time,
                            mach_context=context, instance=instance,
                            command_attrs=command_attrs)
    if data:
        telemetry_dir = os.path.join(get_state_dir(), 'telemetry')
        try:
            os.mkdir(telemetry_dir)
        except OSError as e:
            # Already existing is fine; anything else is a real error.
            if e.errno != errno.EEXIST:
                raise
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        try:
            os.mkdir(outgoing_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # UUID file name avoids collisions between concurrent invocations.
        with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                  'w') as f:
            json.dump(data, f, sort_keys=True)

    # The user is performing a maintenance command, skip the upload
    if handler.name in (
            'bootstrap', 'doctor', 'mach-commands', 'vcs-setup',
            'create-mach-environment', 'install-moz-phab',
            # We call mach environment in client.mk which would cause the
            # data submission to block the forward progress of make.
            'environment'):
        return False

    if 'TEST_MACH_TELEMETRY_NO_SUBMIT' in os.environ:
        # In our telemetry tests, we want telemetry to be collected for
        # analysis, but we don't want it submitted.
        return False

    state_dir = get_state_dir()

    # Submit through `mach python` so the submission runs with the same
    # interpreter setup, detached and with output discarded.
    machpath = os.path.join(instance.topsrcdir, 'mach')
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen([
            sys.executable, machpath, 'python',
            '--no-virtualenv',
            os.path.join(topsrcdir, 'build', 'submit_telemetry_data.py'),
            state_dir],
            stdout=devnull, stderr=devnull)
def bootstrap(self):
    """Interactively install build prerequisites for the chosen application.

    Resolves the application choice, installs system and per-application
    packages, ensures modern hg/python, offers Mercurial configuration, and
    finally offers to clone the source if not already inside a checkout.
    """
    if self.choice is None:
        # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
        labels = ["%s. %s" % (i + 1, name) for (i, (name, _)) in
                  enumerate(APPLICATIONS_LIST)]
        prompt = APPLICATION_CHOICE % "\n".join(labels)
        prompt_choice = self.instance.prompt_int(prompt=prompt, low=1,
                                                 high=len(APPLICATIONS))
        # prompt_int is 1-based; the list is 0-based.
        name, application = APPLICATIONS_LIST[prompt_choice - 1]
    elif self.choice not in APPLICATIONS.keys():
        raise Exception("Please pick a valid application choice: (%s)" %
                        "/".join(APPLICATIONS.keys()))
    else:
        name, application = APPLICATIONS[self.choice]

    self.instance.install_system_packages()

    # Like 'install_browser_packages' or 'install_mobile_android_packages'.
    getattr(self.instance, "install_%s_packages" % application)()

    hg_installed, hg_modern = self.instance.ensure_mercurial_modern()
    self.instance.ensure_python_modern()

    # The state directory code is largely duplicated from mach_bootstrap.py.
    # We can't easily import mach_bootstrap.py because the bootstrapper may
    # run in self-contained mode and only the files in this directory will
    # be available. We /could/ refactor parts of mach_bootstrap.py to be
    # part of this directory to avoid the code duplication.
    state_dir, _ = get_state_dir()

    if not os.path.exists(state_dir):
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(
                prompt=STATE_DIR_INFO.format(statedir=state_dir),
                low=1, high=2)

            if choice == 1:
                print("Creating global state directory: %s" % state_dir)
                os.makedirs(state_dir, mode=0o770)

    state_dir_available = os.path.exists(state_dir)

    # Possibly configure Mercurial if the user wants to.
    # TODO offer to configure Git.
    if hg_installed and state_dir_available:
        configure_hg = False
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(prompt=CONFIGURE_MERCURIAL,
                                              low=1, high=2)
            if choice == 1:
                configure_hg = True
        else:
            configure_hg = self.hg_configure

        if configure_hg:
            configure_mercurial(self.instance.which("hg"), state_dir)

    # Offer to clone if we're not inside a clone.
    checkout_type = current_firefox_checkout(
        check_output=self.instance.check_output,
        hg=self.instance.which("hg"))
    have_clone = False

    if checkout_type:
        have_clone = True
    elif hg_installed and not self.instance.no_interactive:
        dest = raw_input(CLONE_MERCURIAL)
        dest = dest.strip()
        if dest:
            dest = os.path.expanduser(dest)
            have_clone = clone_firefox(self.instance.which("hg"), dest)

    if not have_clone:
        print(SOURCE_ADVERTISE)

    print(self.finished % name)

    # Like 'suggest_browser_mozconfig' or 'suggest_mobile_android_mozconfig'.
    getattr(self.instance, "suggest_%s_mozconfig" % application)()
def test_migration(cmd, obj_dir, to_test, references):
    '''Test the given recipe.

    This creates a workdir by l10n-merging gecko-strings and the m-c source,
    to mimmic gecko-strings after the patch to test landed.
    It then runs the recipe with a gecko-strings clone as localization, both
    dry and wet.
    It inspects the generated commits, and shows a diff between the merged
    reference and the generated content.
    The diff is intended to be visually inspected. Some changes might be
    expected, in particular when formatting of the en-US strings is different.

    Returns 0 on success, 1 when a validation problem was logged.
    '''
    rv = 0
    migration_name = os.path.splitext(os.path.split(to_test)[1])[0]
    work_dir = mozpath.join(obj_dir, migration_name)

    # Start from a clean work directory for every run.
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(mozpath.join(work_dir, 'reference'))
    l10n_toml = mozpath.join(cmd.topsrcdir, 'browser', 'locales', 'l10n.toml')
    # Treat 'reference' as a pseudo-locale rooted in the work directory.
    pc = TOMLParser().parse(l10n_toml, env={'l10n_base': work_dir})
    pc.set_locales(['reference'])
    files = ProjectFiles('reference', [pc])
    for ref in references:
        # Paths must already be normalized so they match project config.
        if ref != mozpath.normpath(ref):
            cmd.log(
                logging.ERROR,
                'fluent-migration-test',
                {
                    'file': to_test,
                    'ref': ref,
                },
                'Reference path "{ref}" needs to be normalized for {file}')
            rv = 1
            continue
        full_ref = mozpath.join(work_dir, 'reference', ref)
        m = files.match(full_ref)
        if m is None:
            raise ValueError("Bad reference path: " + ref)
        m_c_path = m[1]
        g_s_path = mozpath.join(work_dir, 'gecko-strings', ref)
        # Read both channel versions; a missing file merges as empty content.
        resources = [
            b'' if not os.path.exists(f) else open(f, 'rb').read()
            for f in (g_s_path, m_c_path)
        ]
        ref_dir = os.path.dirname(full_ref)
        if not os.path.exists(ref_dir):
            os.makedirs(ref_dir)
        # Merged reference mimics gecko-strings after the patch landed.
        open(full_ref, 'wb').write(merge_channels(ref, resources))
    # Fresh clone of gecko-strings to act as the en-US localization.
    client = hglib.clone(source=mozpath.join(get_state_dir(), 'gecko-strings'),
                         dest=mozpath.join(work_dir, 'en-US'))
    client.open()
    old_tip = client.tip().node
    run_migration = [
        cmd._virtualenv_manager.python_path, '-m', 'fluent.migrate.tool',
        '--lang', 'en-US',
        '--reference-dir', mozpath.join(work_dir, 'reference'),
        '--localization-dir', mozpath.join(work_dir, 'en-US'),
        '--dry-run',
        'fluent_migrations.' + migration_name
    ]
    # First pass: dry run.
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    # drop --dry-run
    run_migration.pop(-2)
    # Second pass: the real (wet) migration, committing to the clone.
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    tip = client.tip().node
    if old_tip == tip:
        cmd.log(logging.WARN, 'fluent-migration-test', {
            'file': to_test,
        }, 'No migration applied for {file}')
        return rv
    # Show the human-reviewable diff between reference and migrated output.
    for ref in references:
        diff_resources(
            mozpath.join(work_dir, 'reference', ref),
            mozpath.join(work_dir, 'en-US', ref),
        )
    messages = [
        l.desc.decode('utf-8')
        for l in client.log(b'::%s - ::%s' % (tip, old_tip))
    ]
    bug = re.search('[0-9]{5,}', migration_name).group()
    # Just check first message for bug number, they're all following the same pattern
    if bug not in messages[0]:
        rv = 1
        cmd.log(logging.ERROR, 'fluent-migration-test', {
            'file': to_test,
        }, 'Missing or wrong bug number for {file}')
    if any('part {}'.format(n + 1) not in msg for n, msg in enumerate(messages)):
        rv = 1
        cmd.log(logging.ERROR, 'fluent-migration-test', {
            'file': to_test,
        }, 'Commit messages should have "part {{index}}" for {file}')
    return rv
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return the mach driver for this source tree.

    :param topsrcdir: path to the top of the source checkout.
    :param mozilla_dir: path to the mozilla directory; defaults to
        ``topsrcdir``.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7+. We put this check here so we generate a
    # user-friendly error message rather than a cryptic stack trace on module
    # import.
    if sys.version_info[0] != 2 or sys.version_info[1] < 7:
        print('Python 2.7 or above (but not Python 3) is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [os.path.join(mozilla_dir, path)
                     for path in search_path(mozilla_dir,
                                             'build/virtualenv_packages.txt')]
    import mach.main
    from mozboot.util import get_state_dir

    from mozbuild.util import patch_main
    patch_main()

    def telemetry_handler(context, data):
        """Annotate and persist one telemetry payload to the outgoing queue."""
        # We have not opted-in to telemetry
        if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
            return

        telemetry_dir = os.path.join(get_state_dir()[0], 'telemetry')
        try:
            os.mkdir(telemetry_dir)
        except OSError as e:
            # Already existing is fine; anything else is a real error.
            if e.errno != errno.EEXIST:
                raise
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        try:
            os.mkdir(outgoing_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # Add common metadata to help submit sorted data later on.
        data['argv'] = sys.argv
        data.setdefault('system', {}).update(dict(
            architecture=list(platform.architecture()),
            machine=platform.machine(),
            python_version=platform.python_version(),
            release=platform.release(),
            system=platform.system(),
            version=platform.version(),
        ))

        if platform.system() == 'Linux':
            dist = list(platform.linux_distribution())
            data['system']['linux_distribution'] = dist
        elif platform.system() == 'Windows':
            # FIX: the previous code had a stray trailing comma that wrapped
            # the value in a one-element tuple; store the plain list instead.
            data['system']['win32_ver'] = list(platform.win32_ver())
        elif platform.system() == 'Darwin':
            # mac version is a special Cupertino snowflake
            r, v, m = platform.mac_ver()
            data['system']['mac_ver'] = [r, list(v), m]

        with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                  'w') as f:
            json.dump(data, f, sort_keys=True)

    def should_skip_dispatch(context, handler):
        """Return True when telemetry handling should be skipped entirely."""
        # The user is performing a maintenance command.
        if handler.name in ('bootstrap', 'doctor', 'mach-commands',
                            'mercurial-setup'):
            return True

        # We are running in automation.
        if 'MOZ_AUTOMATION' in os.environ or 'TASK_ID' in os.environ:
            return True

        # The environment is likely a machine invocation.
        if sys.stdin.closed or not sys.stdin.isatty():
            return True

        return False

    def post_dispatch_handler(context, handler, args):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't do anything when...
        if should_skip_dispatch(context, handler):
            return

        # We call mach environment in client.mk which would cause the
        # data submission below to block the forward progress of make.
        # FIX: ('environment') is a plain string, so the old `in` test was a
        # substring check; a one-element tuple restores membership semantics.
        if handler.name in ('environment',):
            return

        # We have not opted-in to telemetry
        if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
            return

        # Every n-th operation
        if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
            return

        # Detached fire-and-forget submission with output discarded.
        with open(os.devnull, 'wb') as devnull:
            subprocess.Popen([sys.executable,
                              os.path.join(topsrcdir, 'build',
                                           'submit_telemetry_data.py'),
                              get_state_dir()[0]],
                             stdout=devnull, stderr=devnull)

    def populate_context(context, key=None):
        """Lazily provide context attributes requested by mach."""
        if key is None:
            return

        if key == 'state_dir':
            state_dir, is_environ = get_state_dir()
            if is_environ:
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    # Pause for acknowledgement on first run, unless automated.
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            return state_dir

        if key == 'topdir':
            return topsrcdir
        if key == 'telemetry_handler':
            return telemetry_handler
        if key == 'post_dispatch_handler':
            return post_dispatch_handler
        raise AttributeError(key)

    mach = mach.main.Mach(os.getcwd())
    mach.populate_context_handler = populate_context

    if not mach.settings_paths:
        # default global machrc location
        mach.settings_paths.append(get_state_dir()[0])
    # always load local repository configuration
    mach.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        mach.define_category(category, meta['short'], meta['long'],
                             meta['priority'])

    for path in MACH_MODULES:
        mach.load_commands_from_file(os.path.join(mozilla_dir, path))

    return mach
def post_dispatch_handler(context, handler, instance, result,
                          start_time, end_time, depth, args):
    """Perform global operations after command dispatch.

    For now, we will use this to handle build system telemetry.
    """
    # Don't write telemetry data if this mach command was invoked as part of
    # another mach command.
    if depth != 1 or os.environ.get('MACH_MAIN_PID') != str(os.getpid()):
        return

    # Don't write telemetry data for 'mach' when 'DISABLE_TELEMETRY' is set.
    if os.environ.get('DISABLE_TELEMETRY') == '1':
        return

    # We have not opted-in to telemetry
    if not context.settings.build.telemetry:
        return

    # Imported lazily so the opted-out fast path stays cheap.
    from mozbuild.telemetry import gather_telemetry
    from mozbuild.base import MozbuildObject
    import mozpack.path as mozpath

    # Fall back to probing the environment when no build-aware instance
    # was handed to us.
    if not isinstance(instance, MozbuildObject):
        instance = MozbuildObject.from_environment()

    try:
        substs = instance.substs
    except Exception:
        # substs may be unavailable (e.g. before configure).
        substs = {}

    command_attrs = getattr(context, 'command_attrs', {})

    # We gather telemetry for every operation.
    # Map well-known path prefixes to sigils so submitted data does not leak
    # local filesystem layout.
    paths = {
        instance.topsrcdir: '$topsrcdir/',
        instance.topobjdir: '$topobjdir/',
        mozpath.normpath(os.path.expanduser('~')): '$HOME/',
    }
    # This might override one of the existing entries, that's OK.
    # We don't use a sigil here because we treat all arguments as potentially
    # relative paths, so we'd like to get them back as they were specified.
    paths[mozpath.normpath(os.getcwd())] = ''
    data = gather_telemetry(command=handler.name, success=(result == 0),
                            start_time=start_time, end_time=end_time,
                            mach_context=context, substs=substs,
                            command_attrs=command_attrs, paths=paths)
    if data:
        telemetry_dir = os.path.join(get_state_dir(), 'telemetry')
        try:
            os.mkdir(telemetry_dir)
        except OSError as e:
            # Already existing is fine; anything else is a real error.
            if e.errno != errno.EEXIST:
                raise
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        try:
            os.mkdir(outgoing_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # UUID file name avoids collisions between concurrent invocations.
        with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                  'w') as f:
            json.dump(data, f, sort_keys=True)

    if should_skip_telemetry_submission(handler):
        return True

    state_dir = get_state_dir()

    # Submit via `mach python` so the submission runs with the proper
    # interpreter setup, detached and with output discarded.
    machpath = os.path.join(instance.topsrcdir, 'mach')
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen([
            sys.executable, machpath, 'python',
            '--no-virtualenv',
            os.path.join(topsrcdir, 'build', 'submit_telemetry_data.py'),
            state_dir],
            stdout=devnull, stderr=devnull)
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return the mach driver for this source tree.

    :param topsrcdir: path to the top of the source checkout.
    :param mozilla_dir: path to the mozilla directory; defaults to
        ``topsrcdir``.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7+. We put this check here so we generate a
    # user-friendly error message rather than a cryptic stack trace on module
    # import.
    if sys.version_info[0] != 2 or sys.version_info[1] < 7:
        print('Python 2.7 or above (but not Python 3) is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [
        os.path.join(mozilla_dir, path)
        for path in search_path(mozilla_dir, 'build/virtualenv_packages.txt')
    ]
    import mach.base
    import mach.main
    from mozboot.util import get_state_dir

    from mozbuild.util import patch_main
    patch_main()

    def resolve_repository():
        """Return a repository object for mozilla_dir, or None if not a repo."""
        import mozversioncontrol

        try:
            # This API doesn't respect the vcs binary choices from configure.
            # If we ever need to use the VCS binary here, consider something
            # more robust.
            return mozversioncontrol.get_repository_object(path=mozilla_dir)
        except (mozversioncontrol.InvalidRepoPath,
                mozversioncontrol.MissingVCSTool):
            return None

    def should_skip_telemetry_submission(handler):
        """Return True when gathered telemetry must not be uploaded."""
        # The user is performing a maintenance command.
        if handler.name in ('bootstrap', 'doctor', 'mach-commands',
                            'vcs-setup',
                            # We call mach environment in client.mk which
                            # would cause the data submission to block the
                            # forward progress of make.
                            'environment'):
            return True

        # Never submit data when running in automation or when running tests.
        if any(e in os.environ for e in ('MOZ_AUTOMATION', 'TASK_ID',
                                         'MACH_TELEMETRY_NO_SUBMIT')):
            return True

        return False

    def post_dispatch_handler(context, handler, instance, result,
                              start_time, end_time, depth, args):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't write telemetry data if this mach command was invoked as part
        # of another mach command.
        if depth != 1 or os.environ.get('MACH_MAIN_PID') != str(os.getpid()):
            return

        # Don't write telemetry data for 'mach' when 'DISABLE_TELEMETRY' is set.
        if os.environ.get('DISABLE_TELEMETRY') == '1':
            return

        # We have not opted-in to telemetry
        if not context.settings.build.telemetry:
            return

        # Imported lazily so the opted-out fast path stays cheap.
        from mozbuild.telemetry import gather_telemetry
        from mozbuild.base import MozbuildObject
        import mozpack.path as mozpath

        if not isinstance(instance, MozbuildObject):
            instance = MozbuildObject.from_environment()

        try:
            substs = instance.substs
        except Exception:
            # substs may be unavailable (e.g. before configure).
            substs = {}

        command_attrs = getattr(context, 'command_attrs', {})

        # We gather telemetry for every operation.
        # Map well-known path prefixes to sigils so submitted data does not
        # leak local filesystem layout.
        paths = {
            instance.topsrcdir: '$topsrcdir/',
            instance.topobjdir: '$topobjdir/',
            mozpath.normpath(os.path.expanduser('~')): '$HOME/',
        }
        # This might override one of the existing entries, that's OK.
        # We don't use a sigil here because we treat all arguments as
        # potentially relative paths, so we'd like to get them back as they
        # were specified.
        paths[mozpath.normpath(os.getcwd())] = ''
        data = gather_telemetry(command=handler.name, success=(result == 0),
                                start_time=start_time, end_time=end_time,
                                mach_context=context, substs=substs,
                                command_attrs=command_attrs, paths=paths)
        if data:
            telemetry_dir = os.path.join(get_state_dir(), 'telemetry')
            try:
                os.mkdir(telemetry_dir)
            except OSError as e:
                # Already existing is fine; anything else is a real error.
                if e.errno != errno.EEXIST:
                    raise
            outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
            try:
                os.mkdir(outgoing_dir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                      'w') as f:
                json.dump(data, f, sort_keys=True)

        if should_skip_telemetry_submission(handler):
            return True

        state_dir = get_state_dir()

        # Submit via `mach python`, detached and with output discarded.
        machpath = os.path.join(instance.topsrcdir, 'mach')
        with open(os.devnull, 'wb') as devnull:
            subprocess.Popen([
                sys.executable, machpath, 'python',
                '--no-virtualenv',
                os.path.join(topsrcdir, 'build', 'submit_telemetry_data.py'),
                state_dir],
                stdout=devnull, stderr=devnull)

    def populate_context(context, key=None):
        """Lazily provide context attributes requested by mach."""
        if key is None:
            return

        if key == 'state_dir':
            state_dir = get_state_dir()
            if state_dir == os.environ.get('MOZBUILD_STATE_PATH'):
                # Directory chosen via environment variable: create silently.
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    # Pause for acknowledgement on first run, unless automated.
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            return state_dir

        if key == 'local_state_dir':
            return get_state_dir(srcdir=True)
        if key == 'topdir':
            return topsrcdir
        if key == 'post_dispatch_handler':
            return post_dispatch_handler
        if key == 'repository':
            return resolve_repository()
        raise AttributeError(key)

    # Note which process is top-level so that recursive mach invocations can
    # avoid writing telemetry data.
    if 'MACH_MAIN_PID' not in os.environ:
        os.environ[b'MACH_MAIN_PID'] = str(os.getpid()).encode('ascii')

    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir())

    # always load local repository configuration
    driver.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta['short'], meta['long'],
                               meta['priority'])

    repo = resolve_repository()

    for path in MACH_MODULES:
        # Sparse checkouts may not have all mach_commands.py files. Ignore
        # errors from missing files.
        try:
            driver.load_commands_from_file(os.path.join(mozilla_dir, path))
        except mach.base.MissingFileError:
            if not repo or not repo.sparse_checkout_present():
                raise

    return driver
def test_migration(cmd, obj_dir, to_test, references):
    """Test the given recipe.

    This creates a workdir by l10n-merging gecko-strings and the m-c source,
    to mimmic gecko-strings after the patch to test landed.

    It then runs the recipe with a gecko-strings clone as localization, both
    dry and wet. It inspects the generated commits, and shows a diff between
    the merged reference and the generated content.

    The diff is intended to be visually inspected. Some changes might be
    expected, in particular when formatting of the en-US strings is different.

    :param cmd: mach command object providing ``log``, ``run_process``,
        ``topsrcdir``, ``substs`` and ``_virtualenv_manager``.
    :param obj_dir: directory under which the migration workdir is created.
    :param to_test: path to the migration recipe module being tested.
    :param references: reference file paths (relative, normalized) to merge.
    :returns: 0 on success, 1 on any validation/check failure.
    """

    def _read_resource(path):
        # Missing channel files merge as empty content.  Use a context
        # manager so the file handle is closed promptly.
        if not os.path.exists(path):
            return b""
        with open(path, "rb") as fh:
            return fh.read()

    rv = 0
    migration_name = os.path.splitext(os.path.split(to_test)[1])[0]
    work_dir = mozpath.join(obj_dir, migration_name)

    paths = os.path.normpath(to_test).split(os.sep)
    # Migration modules should be in a sub-folder of l10n.
    migration_module = (
        ".".join(paths[paths.index("l10n") + 1:-1]) + "." + migration_name
    )

    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(mozpath.join(work_dir, "reference"))
    l10n_toml = mozpath.join(
        cmd.topsrcdir, cmd.substs["MOZ_BUILD_APP"], "locales", "l10n.toml"
    )
    pc = TOMLParser().parse(l10n_toml, env={"l10n_base": work_dir})
    pc.set_locales(["reference"])
    files = ProjectFiles("reference", [pc])
    for ref in references:
        if ref != mozpath.normpath(ref):
            cmd.log(
                logging.ERROR,
                "fluent-migration-test",
                {
                    "file": to_test,
                    "ref": ref,
                },
                'Reference path "{ref}" needs to be normalized for {file}',
            )
            rv = 1
            continue
        full_ref = mozpath.join(work_dir, "reference", ref)
        m = files.match(full_ref)
        if m is None:
            raise ValueError("Bad reference path: " + ref)
        m_c_path = m[1]
        g_s_path = mozpath.join(work_dir, "gecko-strings", ref)
        # Merge the gecko-strings channel content with the m-c content to
        # simulate gecko-strings after the patch under test has landed.
        resources = [_read_resource(f) for f in (g_s_path, m_c_path)]
        ref_dir = os.path.dirname(full_ref)
        if not os.path.exists(ref_dir):
            os.makedirs(ref_dir)
        with open(full_ref, "wb") as fh:
            fh.write(merge_channels(ref, resources))
    client = hglib.clone(
        source=mozpath.join(get_state_dir(), "gecko-strings"),
        dest=mozpath.join(work_dir, "en-US"),
    )
    client.open()
    old_tip = client.tip().node
    run_migration = [
        cmd._virtualenv_manager.python_path,
        "-m",
        "fluent.migrate.tool",
        "--lang",
        "en-US",
        "--reference-dir",
        mozpath.join(work_dir, "reference"),
        "--localization-dir",
        mozpath.join(work_dir, "en-US"),
        "--dry-run",
        migration_module,
    ]
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    # drop --dry-run
    run_migration.pop(-2)
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    tip = client.tip().node
    if old_tip == tip:
        cmd.log(
            logging.WARN,
            "fluent-migration-test",
            {
                "file": to_test,
            },
            "No migration applied for {file}",
        )
        return rv
    for ref in references:
        diff_resources(
            mozpath.join(work_dir, "reference", ref),
            mozpath.join(work_dir, "en-US", ref),
        )
    messages = [
        l.desc.decode("utf-8") for l in client.log(b"::%s - ::%s" % (tip, old_tip))
    ]
    bug = re.search("[0-9]{5,}", migration_name).group()
    # Just check first message for bug number, they're all following the same pattern
    if bug not in messages[0]:
        rv = 1
        cmd.log(
            logging.ERROR,
            "fluent-migration-test",
            {
                "file": to_test,
            },
            "Missing or wrong bug number for {file}",
        )
    if any("part {}".format(n + 1) not in msg for n, msg in enumerate(messages)):
        rv = 1
        cmd.log(
            logging.ERROR,
            "fluent-migration-test",
            {
                "file": to_test,
            },
            'Commit messages should have "part {{index}}" for {file}',
        )
    return rv
def bootstrap(self):
    """Interactively bootstrap a Firefox development environment.

    Prompts for (or uses the preselected) application, installs system and
    application packages, ensures modern hg/python/rust, creates the global
    state directory, optionally configures Mercurial and offers to clone the
    source, then installs stylo packages and prints final instructions.
    """
    if self.choice is None:
        # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
        labels = [
            '%s. %s' % (i + 1, name)
            for (i, (name, _)) in enumerate(APPLICATIONS_LIST)
        ]
        prompt = APPLICATION_CHOICE % '\n'.join(labels)
        prompt_choice = self.instance.prompt_int(prompt=prompt, low=1,
                                                 high=len(APPLICATIONS))
        # prompt_int is 1-based; the list is 0-based.
        name, application = APPLICATIONS_LIST[prompt_choice - 1]
    elif self.choice not in APPLICATIONS.keys():
        raise Exception('Please pick a valid application choice: (%s)' %
                        '/'.join(APPLICATIONS.keys()))
    else:
        name, application = APPLICATIONS[self.choice]

    self.instance.install_system_packages()

    # Like 'install_browser_packages' or 'install_mobile_android_packages'.
    getattr(self.instance, 'install_%s_packages' % application)()

    hg_installed, hg_modern = self.instance.ensure_mercurial_modern()
    self.instance.ensure_python_modern()
    self.instance.ensure_rust_modern()

    # The state directory code is largely duplicated from mach_bootstrap.py.
    # We can't easily import mach_bootstrap.py because the bootstrapper may
    # run in self-contained mode and only the files in this directory will
    # be available. We /could/ refactor parts of mach_bootstrap.py to be
    # part of this directory to avoid the code duplication.
    # NOTE: this get_state_dir() returns a (path, is_environ) tuple.
    state_dir, _ = get_state_dir()

    if not os.path.exists(state_dir):
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(
                prompt=STATE_DIR_INFO.format(statedir=state_dir),
                low=1,
                high=2)

            if choice == 1:
                print('Creating global state directory: %s' % state_dir)
                os.makedirs(state_dir, mode=0o770)

    state_dir_available = os.path.exists(state_dir)

    r = current_firefox_checkout(check_output=self.instance.check_output,
                                 hg=self.instance.which('hg'))
    (checkout_type, checkout_root) = r

    # Possibly configure Mercurial, but not if the current checkout is Git.
    # TODO offer to configure Git.
    if hg_installed and state_dir_available and checkout_type != 'git':
        configure_hg = False
        if not self.instance.no_interactive:
            choice = self.instance.prompt_int(prompt=CONFIGURE_MERCURIAL,
                                              low=1, high=2)
            if choice == 1:
                configure_hg = True
        else:
            # Non-interactive runs fall back to the preset flag.
            configure_hg = self.hg_configure

        if configure_hg:
            configure_mercurial(self.instance.which('hg'), state_dir)

    # Offer to clone if we're not inside a clone.
    have_clone = False

    if checkout_type:
        have_clone = True
    elif hg_installed and not self.instance.no_interactive:
        dest = self.input_clone_dest()
        if dest:
            have_clone = clone_firefox(self.instance.which('hg'), dest)
            checkout_root = dest

    if not have_clone:
        print(SOURCE_ADVERTISE)

    # Install the clang packages needed for developing stylo.
    if not self.instance.no_interactive:
        # The best place to install our packages is in the state directory
        # we have. If the user doesn't have one, we need them to re-run
        # bootstrap and create the directory.
        #
        # XXX Android bootstrap just assumes the existence of the state
        # directory and writes the NDK into it. Should we do the same?
        if not state_dir_available:
            print(STYLO_DIRECTORY_MESSAGE.format(statedir=state_dir))
            sys.exit(1)

        if not have_clone:
            print(STYLO_REQUIRES_CLONE)
            sys.exit(1)

        self.instance.state_dir = state_dir
        self.instance.ensure_stylo_packages(state_dir, checkout_root)

    print(self.finished % name)
    # A fresh rust install only takes effect in a new shell.
    if not (self.instance.which('rustc') and
            self.instance._parse_version('rustc') >= MODERN_RUST_VERSION):
        print("To build %s, please restart the shell (Start a new terminal window)"
              % name)

    # Like 'suggest_browser_mozconfig' or 'suggest_mobile_android_mozconfig'.
    getattr(self.instance, 'suggest_%s_mozconfig' % application)()
def sign_msix(output, force=False, log=None, verbose=False):
    """Sign an MSIX with a locally generated self-signed certificate.

    :param output: path to the MSIX file to sign.
    :param force: regenerate the certificate even if one already exists.
    :param log: structured-logging callable ``log(level, action, params, fmt)``.
    :param verbose: echo signtool output and print certificate usage help.
    :returns: 0 on success.
    :raises Exception: when not on Windows or certificate handling fails.
    :raises ValueError: when required SDK tools cannot be located.
    """
    # TODO: sign on non-Windows hosts.
    if sys.platform != "win32":
        raise Exception("sign msix only works on Windows")

    powershell_exe = find_sdk_tool("powershell.exe", log=log)
    if not powershell_exe:
        raise ValueError("powershell is required; " "set POWERSHELL or PATH")

    def powershell(argstring, check=True):
        "Invoke `powershell.exe`.  Arguments are given as a string to allow consumer to quote."
        args = [powershell_exe, "-c", argstring]
        joined = " ".join(shlex_quote(arg) for arg in args)
        log(
            logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}"
        )
        return subprocess.run(
            args, check=check, universal_newlines=True, capture_output=True
        ).stdout

    signtool = find_sdk_tool("signtool.exe", log=log)
    if not signtool:
        raise ValueError(
            "signtool is required; " "set SIGNTOOL or WINDOWSSDKDIR or PATH"
        )

    # Our first order of business is to find, or generate, a (self-signed)
    # certificate.

    # These are baked into enough places under `browser/` that we need not
    # extract constants.
    vendor = "Mozilla"
    publisher = "CN=Mozilla Corporation, OU=MSIX Packaging"
    friendly_name = "Mozilla Corporation MSIX Packaging Test Certificate"

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    crt_path = mozpath.join(
        get_state_dir(),
        "cache",
        "mach-msix",
        "{}.crt".format(friendly_name).replace(" ", "_").lower(),
    )
    crt_path = mozpath.abspath(crt_path)
    ensureParentDir(crt_path)

    pfx_path = crt_path.replace(".crt", ".pfx")

    # TODO: maybe use an actual password.  For now, just something that won't be
    # brute-forced.
    password = "******"

    if force or not os.path.isfile(crt_path):
        log(
            logging.INFO,
            "msix",
            {"crt_path": crt_path},
            "Creating new self signed certificate at: {}".format(crt_path),
        )

        thumbprints = [
            thumbprint.strip()
            for thumbprint in powershell(
                (
                    "Get-ChildItem -Path Cert:\CurrentUser\My"
                    '| Where-Object {{$_.Subject -Match "{}"}}'
                    '| Where-Object {{$_.FriendlyName -Match "{}"}}'
                    "| Select-Object -ExpandProperty Thumbprint"
                ).format(vendor, friendly_name)
            ).splitlines()
        ]
        if len(thumbprints) > 1:
            raise Exception(
                "Multiple certificates with friendly name found: {}".format(
                    friendly_name
                )
            )

        if len(thumbprints) == 1:
            thumbprint = thumbprints[0]
        else:
            thumbprint = None

        if not thumbprint:
            thumbprint = (
                powershell(
                    (
                        'New-SelfSignedCertificate -Type Custom -Subject "{}" '
                        '-KeyUsage DigitalSignature -FriendlyName "{}"'
                        " -CertStoreLocation Cert:\CurrentUser\My"
                        ' -TextExtension @("2.5.29.37={{text}}1.3.6.1.5.5.7.3.3", '
                        '"2.5.29.19={{text}}")'
                        "| Select-Object -ExpandProperty Thumbprint"
                    ).format(publisher, friendly_name)
                )
                .strip()
                .upper()
            )

        if not thumbprint:
            raise Exception(
                "Failed to find or create certificate with friendly name: {}".format(
                    friendly_name
                )
            )

        powershell(
            'Export-Certificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"'.format(
                thumbprint, crt_path
            )
        )
        log(
            logging.INFO,
            "msix",
            {"crt_path": crt_path},
            "Exported public certificate: {crt_path}",
        )

        powershell(
            (
                'Export-PfxCertificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"'
                ' -Password (ConvertTo-SecureString -String "{}" -Force -AsPlainText)'
            ).format(thumbprint, pfx_path, password)
        )
        log(
            logging.INFO,
            "msix",
            {"pfx_path": pfx_path},
            "Exported private certificate: {pfx_path}",
        )

    # Second, to find the right thumbprint to use.  We do this here in case
    # we're coming back to an existing certificate.

    log(
        logging.INFO,
        "msix",
        {"crt_path": crt_path},
        "Signing with existing self signed certificate: {crt_path}",
    )

    thumbprints = [
        thumbprint.strip()
        for thumbprint in powershell(
            'Get-PfxCertificate -FilePath "{}" | Select-Object -ExpandProperty Thumbprint'.format(
                crt_path
            )
        ).splitlines()
    ]
    if len(thumbprints) > 1:
        raise Exception("Multiple thumbprints found for PFX: {}".format(pfx_path))
    if len(thumbprints) == 0:
        raise Exception("No thumbprints found for PFX: {}".format(pfx_path))
    thumbprint = thumbprints[0]

    log(
        logging.INFO,
        "msix",
        {"thumbprint": thumbprint},
        "Signing with certificate with thumbprint: {thumbprint}",
    )

    # Third, do the actual signing.
    args = [
        signtool,
        "sign",
        "/a",
        "/fd",
        "SHA256",
        "/f",
        pfx_path,
        "/p",
        password,
        output,
    ]
    # BUGFIX: the condition was previously inverted (`if not verbose`), which
    # echoed signtool output in quiet mode and suppressed it in verbose mode.
    # Echo when verbose; otherwise suppress output unless the tool fails.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    # As a convenience to the user, tell how to use this certificate if it's not
    # already trusted, and how to work with MSIX files more generally.
    if verbose:
        root_thumbprints = [
            root_thumbprint.strip()
            for root_thumbprint in powershell(
                "Get-ChildItem -Path Cert:\LocalMachine\Root\{} "
                "| Select-Object -ExpandProperty Thumbprint".format(thumbprint),
                check=False,
            ).splitlines()
        ]
        if thumbprint not in root_thumbprints:
            log(
                logging.INFO,
                "msix",
                {"thumbprint": thumbprint},
                "Certificate with thumbprint not found in trusted roots: {thumbprint}",
            )

        log(
            logging.INFO,
            "msix",
            {"crt_path": crt_path, "output": output},
            r"""\
# Usage

To trust this certificate (requires an elevated shell):

powershell -c 'Import-Certificate -FilePath "{crt_path}" -Cert Cert:\LocalMachine\Root\'

To verify this MSIX signature exists and is trusted:

powershell -c 'Get-AuthenticodeSignature -FilePath "{output}" | Format-List *'

To install this MSIX:

powershell -c 'Add-AppPackage -path "{output}"'

To see details after installing:

powershell -c 'Get-AppPackage -name Mozilla.MozillaFirefox(Beta,...)'
""".strip(),
        )

    return 0
def repackage_msix(
    dir_or_package,
    channel=None,
    branding=None,
    template=None,
    distribution_dirs=None,
    locale_allowlist=None,
    version=None,
    vendor=None,
    displayname=None,
    app_name="firefox",
    identity=None,
    publisher=None,
    publisher_display_name="Mozilla Corporation",
    arch=None,
    output=None,
    force=False,
    log=None,
    verbose=False,
    makeappx=None,
):
    """Repackage a Firefox build (directory, archive, or .msix) as an MSIX.

    Collects application files, branding assets, template VFS content and
    distribution/langpack files into a staging directory, preprocesses
    ``AppxManifest.xml``, and invokes ``makeappx``/``makemsix`` to produce the
    package.

    :returns: the path of the generated ``.msix`` file.
    """
    # BUGFIX: `distribution_dirs` previously defaulted to `[]` (and
    # `locale_allowlist` to `set()`).  The list default was mutated via
    # `.append(...)` below, so state leaked across calls.  Use None sentinels.
    if distribution_dirs is None:
        distribution_dirs = []
    if locale_allowlist is None:
        locale_allowlist = set()

    if not channel:
        raise Exception("channel is required")
    if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]:
        raise Exception("channel is unrecognized: {}".format(channel))

    if not branding:
        raise Exception("branding dir is required")
    if not os.path.isdir(branding):
        raise Exception("branding dir {} does not exist".format(branding))

    # TODO: maybe we can fish this from the package directly? Maybe from a DLL,
    # maybe from application.ini?
    if arch is None or arch not in _MSIX_ARCH.keys():
        raise Exception(
            "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
        )

    if not os.path.exists(dir_or_package):
        raise Exception("{} does not exist".format(dir_or_package))

    if (
        os.path.isfile(dir_or_package)
        and os.path.splitext(dir_or_package)[1] == ".msix"
    ):
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        msix_dir = mozpath.normsep(
            mozpath.join(
                get_state_dir(),
                "cache",
                "mach-msix",
                "msix-unpack",
            )
        )

        if os.path.exists(msix_dir):
            shutil.rmtree(msix_dir)
        ensureParentDir(msix_dir)

        dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)

    log(
        logging.INFO,
        "msix",
        {
            "input": dir_or_package,
        },
        "Adding files from '{input}'",
    )

    if os.path.isdir(dir_or_package):
        finder = FileFinder(dir_or_package)
    else:
        finder = JarFinder(dir_or_package, JarReader(dir_or_package))

    values = get_application_ini_values(
        finder,
        dict(section="App", value="CodeName", fallback="Name"),
        dict(section="App", value="Vendor"),
    )
    first = next(values)
    displayname = displayname or "Mozilla {}".format(first)
    second = next(values)
    vendor = vendor or second

    # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds.
    # The nested langpack XPI files can't be read by `mozjar.py`.
    unpack_finder = UnpackFinder(finder, unpack_xpi=False)

    if not version:
        values = get_appconstants_jsm_values(
            unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID"
        )
        display_version = next(values)
        buildid = next(values)
        version = get_embedded_version(display_version, buildid)
        log(
            logging.INFO,
            "msix",
            {
                "version": version,
                "display_version": display_version,
                "buildid": buildid,
            },
            "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':"
            + " embedded version will be '{version}'",
        )

    # TODO: Bug 1721922: localize this description via Fluent.
    lines = []
    for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
        lines.extend(
            line
            for line in f.open().read().decode("utf-8").splitlines()
            if "brandFullName" in line
        )
    (brandFullName,) = lines  # We expect exactly one definition.
    _, _, brandFullName = brandFullName.partition("=")
    brandFullName = brandFullName.strip()

    # We don't have a build at repackage-time to gives us this value, and the
    # source of truth is a branding-specific `configure.sh` shell script that we
    # can't easily evaluate completely here. Instead, we take the last value
    # from `configure.sh`.
    with open(mozpath.join(branding, "configure.sh")) as fh:
        lines = [
            line for line in fh.readlines() if "MOZ_IGECKOBACKCHANNEL_IID" in line
        ]
    MOZ_IGECKOBACKCHANNEL_IID = lines[-1]
    _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=")
    MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip()
    if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")):
        MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1]

    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    output_dir = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
        )
    )

    if channel == "beta":
        # Release (official) and Beta share branding. Differentiate Beta a little bit.
        displayname += " Beta"
        brandFullName += " Beta"

    # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
    # Package Root'. This is `BrandFullName` in the installer, and we want to
    # be close but to not match. By not matching, we hope to prevent confusion
    # and/or errors between regularly installed builds and App Package builds.
    instdir = "{} Package Root".format(displayname)

    # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
    identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")

    # We might want to include the publisher ID hash here. I.e.,
    # "__{publisherID}". My locally produced MSIX was named like
    # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
    # missing field, but it's not necessary, since this is just an output file name.
    package_output_name = "{identity}_{version}_{arch}".format(
        identity=identity, version=version, arch=_MSIX_ARCH[arch]
    )
    # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
    default_output = mozpath.normsep(
        mozpath.join(
            get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
        )
    )
    output = output or default_output
    log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")

    m = InstallManifest()
    m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")

    m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
    m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")

    copier = FileCopier()

    # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
    for p, f in finder:
        if not os.path.isdir(dir_or_package):
            # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
            pp = os.path.relpath(p, "firefox")
        else:
            # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
            pp = p

        if pp.startswith("distribution"):
            # Treat any existing distribution as a distribution directory,
            # potentially with language packs. This makes it easy to repack
            # unpacked MSIXes.
            distribution_dir = mozpath.join(dir_or_package, "distribution")
            if distribution_dir not in distribution_dirs:
                distribution_dirs.append(distribution_dir)

            continue

        copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)

    # Locales to declare as supported in `AppxManifest.xml`.
    locales = set(["en-US"])

    for distribution_dir in [
        mozpath.join(template, "distribution")
    ] + distribution_dirs:
        log(
            logging.INFO,
            "msix",
            {"dir": distribution_dir},
            "Adding distribution files from {dir}",
        )

        # In automation, we have no easy way to remap the names of artifacts fetched from dependent
        # tasks. In particular, langpacks will be named like `target.langpack.xpi`. The fetch
        # tasks do allow us to put them in a per-locale directory, so that the entire set can be
        # fetched. Here we remap the names.
        finder = FileFinder(distribution_dir)

        for p, f in finder:
            locale = None
            if os.path.basename(p) == "target.langpack.xpi":
                # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE". This is how langpacks
                # are presented in CI.
                base, locale = os.path.split(os.path.dirname(p))

                # Like "locale-LOCALE/[email protected]". This is what AMO
                # serves and how flatpak builds name langpacks, but not how snap builds name
                # langpacks. I can't explain the discrepancy.
                dest = mozpath.normsep(
                    mozpath.join(
                        base,
                        f"locale-{locale}",
                        f"langpack-{locale}@firefox.mozilla.org.xpi",
                    )
                )

                log(
                    logging.DEBUG,
                    "msix",
                    {"path": p, "dest": dest},
                    "Renaming langpack {path} to {dest}",
                )

            elif os.path.basename(p).startswith("langpack-"):
                # Turn "/path/to/[email protected]" into "LOCALE". This is
                # how langpacks are presented from an unpacked MSIX.
                _, _, locale = os.path.basename(p).partition("langpack-")
                locale, _, _ = locale.partition("@")
                dest = p

            else:
                dest = p

            if locale:
                locale = locale.strip().lower()
                locales.add(locale)
                log(
                    logging.DEBUG,
                    "msix",
                    {"locale": locale, "dest": dest},
                    "Distributing locale '{locale}' from {dest}",
                )

            dest = mozpath.normsep(
                mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
            )
            if copier.contains(dest):
                log(
                    logging.INFO,
                    "msix",
                    {"dest": dest, "path": mozpath.join(finder.base, p)},
                    "Skipping duplicate: {dest} from {path}",
                )
                continue

            log(
                logging.DEBUG,
                "msix",
                {"dest": dest, "path": mozpath.join(finder.base, p)},
                "Adding distribution path: {dest} from {path}",
            )

            copier.add(
                dest,
                f,
            )

    locales.remove("en-US")

    # Windows MSIX packages support a finite set of locales: see
    # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
    # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
    # We distribute all of the langpacks supported by the release channel in our MSIX, which is
    # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales. But we
    # only advertise support in the App manifest for the intersection of that set and the set of
    # supported locales.
    #
    # We distribute all langpacks to avoid the following issue. Suppose a user manually installs a
    # langpack that is not supported by Windows, and then updates the installed MSIX package. MSIX
    # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
    # update the langpack before the update. But, since all langpacks are bundled with the MSIX,
    # that langpack will be up-to-date, preventing one class of YSOD.
    unadvertised = set()
    if locale_allowlist:
        unadvertised = locales - locale_allowlist
        locales = locales & locale_allowlist
    for locale in sorted(unadvertised):
        log(
            logging.INFO,
            "msix",
            {"locale": locale},
            "Not advertising distributed locale '{locale}' that is not recognized by Windows",
        )

    locales = ["en-US"] + list(sorted(locales))
    resource_language_list = "\n".join(
        f' <Resource Language="{locale}" />' for locale in sorted(locales)
    )

    defines = {
        "APPX_ARCH": _MSIX_ARCH[arch],
        "APPX_DISPLAYNAME": brandFullName,
        "APPX_DESCRIPTION": brandFullName,
        # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
        # 'Mozilla.MozillaFirefoxNightly'.
        "APPX_IDENTITY": identity,
        # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
        # Beta Package Root'. See above.
        "APPX_INSTDIR": instdir,
        # Like 'Firefox%20Package%20Root'.
        "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
        "APPX_PUBLISHER": publisher,
        "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
        "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
        "APPX_VERSION": version,
        "MOZ_APP_DISPLAYNAME": displayname,
        "MOZ_APP_NAME": app_name,
        "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID,
    }

    m.add_preprocess(
        mozpath.join(template, "AppxManifest.xml.in"),
        "AppxManifest.xml",
        [],
        defines=defines,
        marker="<!-- #",  # So that we can have well-formed XML.
    )
    m.populate_registry(copier)

    output_dir = mozpath.abspath(output_dir)
    ensureParentDir(output_dir)

    start = time.time()
    result = copier.copy(
        output_dir, remove_empty_directories=True, skip_if_older=not force
    )
    if log:
        log_copy_result(log, time.time() - start, output_dir, result)

    if verbose:
        # Dump AppxManifest.xml contents for ease of debugging.
        log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
        log(logging.DEBUG, "msix", {}, ">>>")
        with open(mozpath.join(output_dir, "AppxManifest.xml")) as fh:
            for line in fh.readlines():
                log(logging.DEBUG, "msix", {}, line[:-1])  # Drop trailing line terminator.
        log(logging.DEBUG, "msix", {}, "<<<")

    if not makeappx:
        makeappx = find_sdk_tool("makeappx.exe", log=log)
    if not makeappx:
        raise ValueError(
            "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH"
        )

    # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
    # supports only hyphen style. `makeappx.exe` allows to overwrite and to
    # provide more feedback, so we prefer invoking with these flags. This will
    # also accommodate `wine makeappx.exe`.
    stdout = subprocess.run(
        [makeappx], check=False, capture_output=True, universal_newlines=True
    ).stdout
    is_makeappx = "MakeAppx Tool" in stdout

    if is_makeappx:
        args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
    else:
        args = [makeappx, "pack", "-d", output_dir, "-p", output]
    if verbose and is_makeappx:
        args.append("/verbose")
    joined = " ".join(shlex_quote(arg) for arg in args)
    log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")

    sys.stdout.flush()  # Otherwise the subprocess output can be interleaved.
    if verbose:
        subprocess.check_call(args, universal_newlines=True)
    else:
        # Suppress output unless we fail.
        try:
            subprocess.check_output(args, universal_newlines=True)
        except subprocess.CalledProcessError as e:
            sys.stderr.write(e.output)
            raise

    return output
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return the mach driver for this source tree.

    Sets up ``sys.path``, wires up telemetry and context handlers, and loads
    the mach command modules.

    :param topsrcdir: absolute path to the top of the source checkout.
    :param mozilla_dir: path to the mozilla directory; defaults to topsrcdir.
    :returns: a configured ``mach.main.Mach`` instance.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7+. We put this check here so we generate a
    # user-friendly error message rather than a cryptic stack trace on module
    # import.
    if sys.version_info[0] != 2 or sys.version_info[1] < 7:
        print('Python 2.7 or above (but not Python 3) is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [
        os.path.join(mozilla_dir, path)
        for path in search_path(mozilla_dir, 'build/virtualenv_packages.txt')
    ]
    import mach.main
    from mozboot.util import get_state_dir
    from mozbuild.util import patch_main
    patch_main()

    def telemetry_handler(context, data):
        # We have not opted-in to telemetry
        if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
            return

        # NOTE: this get_state_dir() returns a (path, is_environ) tuple.
        telemetry_dir = os.path.join(get_state_dir()[0], 'telemetry')
        try:
            os.mkdir(telemetry_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        try:
            os.mkdir(outgoing_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # Add common metadata to help submit sorted data later on.
        data['argv'] = sys.argv
        data.setdefault('system', {}).update(dict(
            architecture=list(platform.architecture()),
            machine=platform.machine(),
            python_version=platform.python_version(),
            release=platform.release(),
            system=platform.system(),
            version=platform.version(),
        ))

        if platform.system() == 'Linux':
            dist = list(platform.linux_distribution())
            data['system']['linux_distribution'] = dist
        elif platform.system() == 'Windows':
            # BUGFIX: a stray trailing comma here previously wrapped the list
            # in a 1-tuple, corrupting the submitted data.
            win32_ver = list(platform.win32_ver())
            data['system']['win32_ver'] = win32_ver
        elif platform.system() == 'Darwin':
            # mac version is a special Cupertino snowflake
            r, v, m = platform.mac_ver()
            data['system']['mac_ver'] = [r, list(v), m]

        with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'),
                  'w') as f:
            json.dump(data, f, sort_keys=True)

    def should_skip_dispatch(context, handler):
        # The user is performing a maintenance command.
        if handler.name in ('bootstrap', 'doctor', 'mach-commands',
                            'mercurial-setup'):
            return True

        # We are running in automation.
        if 'MOZ_AUTOMATION' in os.environ or 'TASK_ID' in os.environ:
            return True

        # The environment is likely a machine invocation.
        if sys.stdin.closed or not sys.stdin.isatty():
            return True

        return False

    def post_dispatch_handler(context, handler, args):
        """Perform global operations after command dispatch.


        For now, we will use this to handle build system telemetry.
        """
        # Don't do anything when...
        if should_skip_dispatch(context, handler):
            return

        # We call mach environment in client.mk which would cause the
        # data submission below to block the forward progress of make.
        # BUGFIX: this must be a 1-tuple; the previous bare ('environment')
        # was just a string, so `in` performed a substring test and matched
        # any command whose name is a substring of 'environment'.
        if handler.name in ('environment',):
            return

        # We have not opted-in to telemetry
        if 'BUILD_SYSTEM_TELEMETRY' not in os.environ:
            return

        # Every n-th operation
        if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
            return

        with open(os.devnull, 'wb') as devnull:
            subprocess.Popen([sys.executable,
                              os.path.join(topsrcdir, 'build',
                                           'submit_telemetry_data.py'),
                              get_state_dir()[0]],
                             stdout=devnull, stderr=devnull)

    def populate_context(context, key=None):
        if key is None:
            return
        if key == 'state_dir':
            state_dir, is_environ = get_state_dir()
            if is_environ:
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)

            return state_dir

        if key == 'topdir':
            return topsrcdir

        if key == 'telemetry_handler':
            return telemetry_handler

        if key == 'post_dispatch_handler':
            return post_dispatch_handler

        raise AttributeError(key)

    # Use a name that does not shadow the `mach` package imported above.
    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir()[0])
    # always load local repository configuration
    driver.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta['short'], meta['long'],
                               meta['priority'])

    for path in MACH_MODULES:
        driver.load_commands_from_file(os.path.join(mozilla_dir, path))

    return driver
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return a configured mach driver rooted at *topsrcdir*.

    *mozilla_dir* defaults to *topsrcdir*; it is where mach command modules
    and the local machrc are looked up.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7+. We put this check here so we generate a
    # user-friendly error message rather than a cryptic stack trace on module
    # import.
    if sys.version_info[0] != 2 or sys.version_info[1] < 7:
        print('Python 2.7 or above (but not Python 3) is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [os.path.join(mozilla_dir, path)
                     for path in search_path(mozilla_dir,
                                             'build/virtualenv_packages.txt')]
    import mach.base
    import mach.main
    from mozboot.util import get_state_dir

    from mozbuild.util import patch_main
    patch_main()

    def resolve_repository():
        # Returns a repository object for the source checkout, or None when
        # no VCS (or no VCS tool) is detected.
        import mozversioncontrol

        try:
            # This API doesn't respect the vcs binary choices from configure.
            # If we ever need to use the VCS binary here, consider something
            # more robust.
            return mozversioncontrol.get_repository_object(path=mozilla_dir)
        except (mozversioncontrol.InvalidRepoPath,
                mozversioncontrol.MissingVCSTool):
            return None

    def should_skip_telemetry_submission(handler):
        # Returns True when telemetry should be recorded but not submitted.
        # The user is performing a maintenance command.
        if handler.name in ('bootstrap', 'doctor', 'mach-commands', 'vcs-setup',
                            # We call mach environment in client.mk which would cause the
                            # data submission to block the forward progress of make.
                            'environment'):
            return True

        # Never submit data when running in automation or when running tests.
        if any(e in os.environ for e in ('MOZ_AUTOMATION', 'TASK_ID',
                                         'MACH_TELEMETRY_NO_SUBMIT')):
            return True

        return False

    def post_dispatch_handler(context, handler, instance, result,
                              start_time, end_time, depth, args):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't write telemetry data if this mach command was invoked as part of another
        # mach command.
        if depth != 1 or os.environ.get('MACH_MAIN_PID') != str(os.getpid()):
            return

        # Don't write telemetry data for 'mach' when 'DISABLE_TELEMETRY' is set.
        if os.environ.get('DISABLE_TELEMETRY') == '1':
            return

        # We have not opted-in to telemetry
        if not context.settings.build.telemetry:
            return

        from mozbuild.telemetry import gather_telemetry
        from mozbuild.base import MozbuildObject
        import mozpack.path as mozpath

        if not isinstance(instance, MozbuildObject):
            instance = MozbuildObject.from_environment()

        try:
            substs = instance.substs
        except Exception:
            # Best-effort: a broken/missing build config must not break mach.
            substs = {}

        # We gather telemetry for every operation.
        # Prefix -> sigil map used to scrub user-identifying absolute paths
        # from the submitted data.
        paths = {
            instance.topsrcdir: '$topsrcdir/',
            instance.topobjdir: '$topobjdir/',
            mozpath.normpath(os.path.expanduser('~')): '$HOME/',
        }
        # This might override one of the existing entries, that's OK.
        # We don't use a sigil here because we treat all arguments as potentially relative
        # paths, so we'd like to get them back as they were specified.
        paths[mozpath.normpath(os.getcwd())] = ''

        data = gather_telemetry(command=handler.name, success=(result == 0),
                                start_time=start_time, end_time=end_time,
                                mach_context=context, substs=substs,
                                paths=paths)
        if data:
            telemetry_dir = os.path.join(get_state_dir(), 'telemetry')
            try:
                os.mkdir(telemetry_dir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
            outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
            try:
                os.mkdir(outgoing_dir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            with open(os.path.join(outgoing_dir,
                                   str(uuid.uuid4()) + '.json'), 'w') as f:
                json.dump(data, f, sort_keys=True)

        if should_skip_telemetry_submission(handler):
            # NOTE(review): this is the only path that returns True; the other
            # paths return None. Confirm the dispatcher ignores the value.
            return True

        state_dir = get_state_dir()

        machpath = os.path.join(instance.topsrcdir, 'mach')
        # Fire-and-forget submission via `mach python` so the current command
        # is not blocked; output is discarded.
        with open(os.devnull, 'wb') as devnull:
            subprocess.Popen([sys.executable, machpath, 'python',
                              '--no-virtualenv',
                              os.path.join(topsrcdir, 'build',
                                           'submit_telemetry_data.py'),
                              state_dir],
                             stdout=devnull, stderr=devnull)

    def populate_context(context, key=None):
        # Mach context handler: lazily resolve context attributes by key.
        if key is None:
            return

        if key == 'state_dir':
            state_dir = get_state_dir()
            if state_dir == os.environ.get('MOZBUILD_STATE_PATH'):
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)

            return state_dir

        if key == 'local_state_dir':
            return get_state_dir(srcdir=True)

        if key == 'topdir':
            return topsrcdir

        if key == 'post_dispatch_handler':
            return post_dispatch_handler

        if key == 'repository':
            return resolve_repository()

        raise AttributeError(key)

    # Note which process is top-level so that recursive mach invocations can avoid writing
    # telemetry data.
    if 'MACH_MAIN_PID' not in os.environ:
        # NOTE(review): bytes key/value is valid for os.environ on Python 2
        # (which this file requires); it would raise TypeError on Python 3.
        os.environ[b'MACH_MAIN_PID'] = str(os.getpid()).encode('ascii')

    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir())

    # always load local repository configuration
    driver.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta['short'], meta['long'],
                               meta['priority'])

    repo = resolve_repository()

    for path in MACH_MODULES:
        # Sparse checkouts may not have all mach_commands.py files. Ignore
        # errors from missing files.
        try:
            driver.load_commands_from_file(os.path.join(mozilla_dir, path))
        except mach.base.MissingFileError:
            if not repo or not repo.sparse_checkout_present():
                raise

    return driver
def post_dispatch_handler(context, handler, instance, result,
                          start_time, end_time, depth, args):
    """Perform global operations after command dispatch.

    For now, we will use this to handle build system telemetry.
    """
    # Only the top-level mach invocation writes telemetry; nested mach
    # commands (depth > 1, or a process that isn't the recorded main PID)
    # bail out immediately.
    if depth != 1 or os.environ.get('MACH_MAIN_PID') != str(os.getpid()):
        return

    # Don't write telemetry data for 'mach' when 'DISABLE_TELEMETRY' is set.
    if os.environ.get('DISABLE_TELEMETRY') == '1':
        return

    # We have not opted-in to telemetry
    if not context.settings.build.telemetry:
        return

    from mozbuild.telemetry import gather_telemetry
    from mozbuild.base import MozbuildObject
    import mozpack.path as mozpath

    if not isinstance(instance, MozbuildObject):
        instance = MozbuildObject.from_environment()

    try:
        substs = instance.substs
    except Exception:
        # Best-effort: a missing/broken build config must not break mach.
        substs = {}

    # Prefix -> sigil map used to scrub user-identifying absolute paths from
    # the gathered data.
    sanitize_map = {
        instance.topsrcdir: '$topsrcdir/',
        instance.topobjdir: '$topobjdir/',
        mozpath.normpath(os.path.expanduser('~')): '$HOME/',
    }
    # This might override one of the existing entries, that's OK.
    # We don't use a sigil here because we treat all arguments as potentially
    # relative paths, so we'd like to get them back as they were specified.
    sanitize_map[mozpath.normpath(os.getcwd())] = ''

    payload = gather_telemetry(command=handler.name, success=(result == 0),
                               start_time=start_time, end_time=end_time,
                               mach_context=context, substs=substs,
                               paths=sanitize_map)
    if payload:
        def ensure_dir(path):
            # mkdir that tolerates the directory already existing.
            try:
                os.mkdir(path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

        telemetry_dir = os.path.join(get_state_dir(), 'telemetry')
        ensure_dir(telemetry_dir)
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        ensure_dir(outgoing_dir)

        outgoing_name = str(uuid.uuid4()) + '.json'
        with open(os.path.join(outgoing_dir, outgoing_name), 'w') as f:
            json.dump(payload, f, sort_keys=True)

    if should_skip_telemetry_submission(handler):
        return True

    state_dir = get_state_dir()
    machpath = os.path.join(instance.topsrcdir, 'mach')
    # Fire-and-forget submission via `mach python`; output is discarded so
    # the background process never blocks or pollutes the terminal.
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen([sys.executable, machpath, 'python',
                          '--no-virtualenv',
                          os.path.join(topsrcdir, 'build',
                                       'submit_telemetry_data.py'),
                          state_dir],
                         stdout=devnull, stderr=devnull)
VCS_NOT_FOUND = """
Could not detect version control. Only `hg` or `git` are supported.
""".strip()

UNCOMMITTED_CHANGES = """
ERROR please commit changes before continuing
""".strip()

# Maximum number of remembered try task configs.
MAX_HISTORY = 10

here = os.path.abspath(os.path.dirname(__file__))
build = MozbuildObject.from_environment(cwd=here)
vcs = get_repository_object(build.topsrcdir)

history_path = os.path.join(get_state_dir(srcdir=True), 'history',
                            'try_task_configs.json')


def write_task_config(try_task_config):
    """Write *try_task_config* as try_task_config.json at the vcs root.

    Returns the path of the file that was written.
    """
    config_path = os.path.join(vcs.path, 'try_task_config.json')
    serialized = json.dumps(try_task_config, indent=4,
                            separators=(',', ': '), sort_keys=True)
    with open(config_path, 'w') as config_file:
        config_file.write(serialized)
        config_file.write('\n')
    return config_path
from mozboot.util import get_state_dir
from mozterm import Terminal

from ..cli import BaseTryParser
from ..tasks import generate_tasks, filter_tasks_by_paths
from ..push import check_working_directory, push_to_try, generate_try_task_config

terminal = Terminal()

here = os.path.abspath(os.path.dirname(__file__))
build = MozbuildObject.from_environment(cwd=here)

PREVIEW_SCRIPT = os.path.join(build.topsrcdir,
                              'tools/tryselect/formatters/preview.py')

TASK_DURATION_URL = 'https://storage.googleapis.com/mozilla-mach-data/task_duration_history.json'
GRAPH_QUANTILES_URL = 'https://storage.googleapis.com/mozilla-mach-data/machtry_quantiles.csv'
TASK_DURATION_CACHE = os.path.join(get_state_dir(srcdir=True), 'cache',
                                   'task_duration_history.json')
GRAPH_QUANTILE_CACHE = os.path.join(get_state_dir(srcdir=True), 'cache',
                                    'graph_quantile_cache.csv')
TASK_DURATION_TAG_FILE = os.path.join(get_state_dir(srcdir=True), 'cache',
                                      'task_duration_tag.json')

# Some tasks show up in the target task set, but are either special cases
# or uncommon enough that they should only be selectable with --full.
#
# FIX: regex patterns are now raw strings. The first entry previously used
# the unrecognized escape `\/` in a plain string, which is a
# DeprecationWarning since Python 3.6 (and a SyntaxWarning/error in later
# versions). The raw strings contain exactly the same bytes as before, so
# the regex behavior is unchanged.
TARGET_TASK_FILTERS = (
    r'.*-ccov\/.*',
    r'windows10-aarch64/opt.*',
    r'android-hw.*',
    r'.*android-geckoview-docs.*',
    r'.*win64-aarch64-laptop.*',
)
def fzf_bootstrap(update=False):
    """Bootstrap fzf if necessary and return path to the executable.

    The bootstrap works by cloning the fzf repository and running the
    included `install` script. If update is True, we will pull the
    repository and re-run the install script.
    """
    preinstalled = find_executable("fzf")
    if preinstalled and should_force_fzf_update(preinstalled):
        # The binary on $PATH is too old/stale; refresh our managed copy.
        update = True

    if preinstalled and not update:
        return preinstalled

    fzf_path = os.path.join(get_state_dir(), "fzf")

    # Bug 1623197: We only want to run fzf's `install` if it's not in the $PATH
    # Swap to os.path.commonpath when we're not on Py2
    if preinstalled and update and not preinstalled.startswith(fzf_path):
        print("fzf installed somewhere other than {}, please update manually"
              .format(fzf_path))
        sys.exit(1)

    def get_fzf():
        # Locate the binary produced inside our managed clone.
        return find_executable("fzf", os.path.join(fzf_path, "bin"))

    if os.path.isdir(fzf_path):
        if not update:
            managed = get_fzf()
            if not managed or should_force_fzf_update(managed):
                # Fzf is cloned, but the binary is missing or stale; re-run
                # the bootstrap in update mode to rebuild it.
                return fzf_bootstrap(update=True)
            return managed

        if run_cmd(["git", "pull"], cwd=fzf_path):
            print("Update fzf failed.")
            sys.exit(1)

        run_fzf_install_script(fzf_path)
        return get_fzf()

    if not update:
        answer = input("Could not detect fzf, install it now? [y/n]: ")
        if answer.lower() != "y":
            return

    if not find_executable("git"):
        print("Git not found.")
        print(FZF_INSTALL_FAILED)
        sys.exit(1)

    clone_cmd = ["git", "clone", "--depth", "1",
                 "https://github.com/junegunn/fzf.git"]
    if subprocess.call(clone_cmd, cwd=os.path.dirname(fzf_path)):
        print(FZF_INSTALL_FAILED)
        sys.exit(1)

    run_fzf_install_script(fzf_path)

    print("Installed fzf to {}".format(fzf_path))
    return get_fzf()
def run(
    update=False,
    query=None,
    intersect_query=None,
    try_config=None,
    full=False,
    parameters=None,
    save_query=False,
    push=True,
    message="{msg}",
    test_paths=None,
    exact=False,
    closed_tree=False,
    show_estimates=False,
    disable_target_task_filter=False,
):
    """Interactively (or via queries) select try tasks with fzf and push.

    Returns 1 on bootstrap/selection failure, the list of fzf query strings
    when save_query is set, None when nothing was selected, and otherwise
    the result of push_to_try().
    """
    fzf = fzf_bootstrap(update)

    if not fzf:
        print(FZF_NOT_FOUND)
        return 1

    check_working_directory(push)
    tg = generate_tasks(parameters, full=full,
                        disable_target_task_filter=disable_target_task_filter)
    all_tasks = sorted(tg.tasks.keys())

    # graph_Cache created by generate_tasks, recreate the path to that file.
    cache_dir = os.path.join(get_state_dir(srcdir=True), "cache", "taskgraph")
    if full:
        graph_cache = os.path.join(cache_dir, "full_task_graph")
        dep_cache = os.path.join(cache_dir, "full_task_dependencies")
        target_set = os.path.join(cache_dir, "full_task_set")
    else:
        graph_cache = os.path.join(cache_dir, "target_task_graph")
        dep_cache = os.path.join(cache_dir, "target_task_dependencies")
        target_set = os.path.join(cache_dir, "target_task_set")

    if show_estimates:
        download_task_history_data(cache_dir=cache_dir)
        make_trimmed_taskgraph_cache(graph_cache, dep_cache,
                                     target_file=target_set)

    if not full and not disable_target_task_filter:
        # Put all_tasks into a list because it's used multiple times, and "filter()"
        # returns a consumable iterator.
        all_tasks = list(filter(filter_by_uncommon_try_tasks, all_tasks))

    if test_paths:
        all_tasks = filter_tasks_by_paths(all_tasks, test_paths)
        if not all_tasks:
            return 1

    key_shortcuts = [k + ":" + v for k, v in fzf_shortcuts.items()]
    base_cmd = [
        fzf,
        "-m",
        "--bind",
        ",".join(key_shortcuts),
        "--header",
        format_header(),
        "--preview-window=right:30%",
        "--print-query",
    ]

    if show_estimates:
        # Preview pane also shows duration estimates (-s) from the cache.
        base_cmd.extend([
            "--preview",
            '{} {} -g {} -s -c {} -t "{{+f}}"'.format(
                sys.executable, PREVIEW_SCRIPT, dep_cache, cache_dir),
        ])
    else:
        base_cmd.extend([
            "--preview",
            '{} {} -t "{{+f}}"'.format(sys.executable, PREVIEW_SCRIPT),
        ])

    if exact:
        base_cmd.append("--exact")

    selected = set()
    queries = []

    def get_tasks(query_arg=None, candidate_tasks=all_tasks):
        # Run one fzf pass; records the query string used (closure over
        # `queries`) and returns the chosen tasks as a set.
        cmd = base_cmd[:]
        if query_arg and query_arg != "INTERACTIVE":
            cmd.extend(["-f", query_arg])

        query_str, tasks = run_fzf(cmd, sorted(candidate_tasks))
        queries.append(query_str)
        return set(tasks)

    # Union of all --query selections.
    for q in query or []:
        selected |= get_tasks(q)

    # Intersection with each --and query (seeded from the first when empty).
    for q in intersect_query or []:
        if not selected:
            tasks = get_tasks(q)
            selected |= tasks
        else:
            tasks = get_tasks(q, selected)
            selected &= tasks

    if not queries:
        # No queries given on the command line: fully interactive session.
        selected = get_tasks()

    if not selected:
        print("no tasks selected")
        return

    if save_query:
        return queries

    # build commit message
    msg = "Fuzzy"
    args = ["query={}".format(q) for q in queries]
    if test_paths:
        args.append("paths={}".format(":".join(test_paths)))
    if args:
        msg = "{} {}".format(msg, "&".join(args))
    return push_to_try(
        "fuzzy",
        message.format(msg=msg),
        try_task_config=generate_try_task_config("fuzzy", selected, try_config),
        push=push,
        closed_tree=closed_tree,
    )
def run(update=False, query=None, intersect_query=None, try_config=None,
        full=False, parameters=None, save_query=False, push=True,
        message='{msg}', test_paths=None, exact=False, closed_tree=False,
        show_estimates=False):
    """Interactively (or via queries) select try tasks with fzf and push.

    NOTE(review): this is the Python 2 era variant of this command
    (`iteritems`, list-returning `filter`); keep it Py2-compatible.
    """
    fzf = fzf_bootstrap(update)

    if not fzf:
        print(FZF_NOT_FOUND)
        return 1

    check_working_directory(push)
    tg = generate_tasks(parameters, full)
    all_tasks = sorted(tg.tasks.keys())

    # Paths of the caches written by generate_tasks.
    cache_dir = os.path.join(get_state_dir(srcdir=True), 'cache', 'taskgraph')
    if full:
        graph_cache = os.path.join(cache_dir, 'full_task_graph')
        dep_cache = os.path.join(cache_dir, 'full_task_dependencies')
    else:
        graph_cache = os.path.join(cache_dir, 'target_task_graph')
        dep_cache = os.path.join(cache_dir, 'target_task_dependencies')

    if show_estimates:
        download_task_history_data()
        make_trimmed_taskgraph_cache(graph_cache, dep_cache)

    if not full:
        # Py2 filter() returns a list, so this is safe to reuse below.
        all_tasks = filter(filter_target_task, all_tasks)

    if test_paths:
        all_tasks = filter_tasks_by_paths(all_tasks, test_paths)
        if not all_tasks:
            return 1

    key_shortcuts = [k + ':' + v for k, v in fzf_shortcuts.iteritems()]
    base_cmd = [
        fzf, '-m',
        '--bind', ','.join(key_shortcuts),
        '--header', format_header(),
        '--preview-window=right:30%',
        '--print-query',
    ]

    if show_estimates and os.path.isfile(TASK_DURATION_CACHE):
        # Preview pane also shows duration estimates from the local caches.
        base_cmd.extend([
            '--preview',
            'python {} -g {} -d {} -q {} "{{+}}"'.format(
                PREVIEW_SCRIPT, dep_cache, TASK_DURATION_CACHE,
                GRAPH_QUANTILE_CACHE),
        ])
    else:
        base_cmd.extend([
            '--preview', 'python {} "{{+}}"'.format(PREVIEW_SCRIPT),
        ])

    if exact:
        base_cmd.append('--exact')

    selected = set()
    queries = []

    def get_tasks(query_arg=None, candidate_tasks=all_tasks):
        # Run one fzf pass; records the query string used (closure over
        # `queries`) and returns the chosen tasks as a set.
        cmd = base_cmd[:]
        if query_arg and query_arg != 'INTERACTIVE':
            cmd.extend(['-f', query_arg])

        query_str, tasks = run_fzf(cmd, sorted(candidate_tasks))
        queries.append(query_str)
        return set(tasks)

    # Union of all --query selections.
    for q in query or []:
        selected |= get_tasks(q)

    # Intersection with each --and query (seeded from the first when empty).
    for q in intersect_query or []:
        if not selected:
            tasks = get_tasks(q)
            selected |= tasks
        else:
            tasks = get_tasks(q, selected)
            selected &= tasks

    if not queries:
        # No queries given on the command line: fully interactive session.
        selected = get_tasks()

    if not selected:
        print("no tasks selected")
        return

    if save_query:
        return queries

    # build commit message
    msg = "Fuzzy"
    args = ["query={}".format(q) for q in queries]
    if test_paths:
        args.append("paths={}".format(':'.join(test_paths)))
    if args:
        msg = "{} {}".format(msg, '&'.join(args))
    return push_to_try('fuzzy', message.format(msg=msg),
                       try_task_config=generate_try_task_config(
                           'fuzzy', selected, try_config),
                       push=push, closed_tree=closed_tree)
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return a configured mach driver rooted at *topsrcdir*.

    *mozilla_dir* defaults to *topsrcdir*; it is where mach command modules
    and the local machrc are looked up.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7+. We put this check here so we generate a
    # user-friendly error message rather than a cryptic stack trace on module
    # import.
    if sys.version_info[0] != 2 or sys.version_info[1] < 7:
        print('Python 2.7 or above (but not Python 3) is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [os.path.join(mozilla_dir, path)
                     for path in search_path(mozilla_dir,
                                             'build/virtualenv_packages.txt')]
    import mach.base
    import mach.main
    from mozboot.util import get_state_dir

    from mozbuild.util import patch_main
    patch_main()

    def resolve_repository():
        # Returns a repository object for the source checkout, or None when
        # no VCS (or no VCS tool) is detected.
        import mozversioncontrol

        try:
            # This API doesn't respect the vcs binary choices from configure.
            # If we ever need to use the VCS binary here, consider something
            # more robust.
            return mozversioncontrol.get_repository_object(path=mozilla_dir)
        except (mozversioncontrol.InvalidRepoPath,
                mozversioncontrol.MissingVCSTool):
            return None

    def telemetry_handler(context, data):
        # Writes one gathered-telemetry payload to the outgoing queue
        # directory (picked up later by the submission script).
        # We have not opted-in to telemetry
        if not context.settings.build.telemetry:
            return

        telemetry_dir = os.path.join(get_state_dir()[0], 'telemetry')
        try:
            os.mkdir(telemetry_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        outgoing_dir = os.path.join(telemetry_dir, 'outgoing')
        try:
            os.mkdir(outgoing_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        with open(os.path.join(outgoing_dir,
                               str(uuid.uuid4()) + '.json'), 'w') as f:
            json.dump(data, f, sort_keys=True)

    def should_skip_dispatch(context, handler):
        # Returns True when the post-dispatch telemetry work should be skipped.
        # The user is performing a maintenance command.
        if handler.name in ('bootstrap', 'doctor', 'mach-commands', 'vcs-setup',
                            # We call mach environment in client.mk which would cause the
                            # data submission to block the forward progress of make.
                            'environment'):
            return True

        # We are running in automation.
        if 'MOZ_AUTOMATION' in os.environ or 'TASK_ID' in os.environ:
            return True

        # The environment is likely a machine invocation.
        if sys.stdin.closed or not sys.stdin.isatty():
            return True

        return False

    def post_dispatch_handler(context, handler, instance, result,
                              start_time, end_time, args):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't do anything when...
        if should_skip_dispatch(context, handler):
            return

        # We have not opted-in to telemetry
        if not context.settings.build.telemetry:
            return

        from mozbuild.telemetry import gather_telemetry
        from mozbuild.base import MozbuildObject
        if not isinstance(instance, MozbuildObject):
            instance = MozbuildObject.from_environment()

        try:
            substs = instance.substs
        except Exception:
            # Best-effort: a missing/broken build config must not break mach.
            substs = {}

        # We gather telemetry for every operation...
        gather_telemetry(command=handler.name, success=(result == 0),
                         start_time=start_time, end_time=end_time,
                         mach_context=context, substs=substs,
                         paths=[instance.topsrcdir, instance.topobjdir])

        # But only submit about every n-th operation
        if random.randint(1, TELEMETRY_SUBMISSION_FREQUENCY) != 1:
            return

        # Fire-and-forget submission; output discarded so the background
        # process never blocks or pollutes the terminal.
        with open(os.devnull, 'wb') as devnull:
            subprocess.Popen([sys.executable,
                              os.path.join(topsrcdir, 'build',
                                           'submit_telemetry_data.py'),
                              get_state_dir()[0]],
                             stdout=devnull, stderr=devnull)

    def populate_context(context, key=None):
        # Mach context handler: lazily resolve context attributes by key.
        if key is None:
            return

        if key == 'state_dir':
            state_dir, is_environ = get_state_dir()
            if is_environ:
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)

            return state_dir

        if key == 'topdir':
            return topsrcdir

        if key == 'telemetry_handler':
            return telemetry_handler

        if key == 'post_dispatch_handler':
            return post_dispatch_handler

        if key == 'repository':
            return resolve_repository()

        raise AttributeError(key)

    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir()[0])

    # always load local repository configuration
    driver.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta['short'], meta['long'],
                               meta['priority'])

    repo = resolve_repository()

    for path in MACH_MODULES:
        # Sparse checkouts may not have all mach_commands.py files. Ignore
        # errors from missing files.
        try:
            driver.load_commands_from_file(os.path.join(mozilla_dir, path))
        except mach.base.MissingFileError:
            if not repo or not repo.sparse_checkout_present():
                raise

    return driver
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import, print_function, unicode_literals import ConfigParser import os import subprocess from mozboot.util import get_state_dir CONFIG_PATH = os.path.join(get_state_dir()[0], "autotry.ini") def list_presets(section=None): config = ConfigParser.RawConfigParser() data = [] if config.read([CONFIG_PATH]): sections = [section] if section else config.sections() for s in sections: try: data.extend(config.items(s)) except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): pass if not data: print("No presets found")
from mozboot.util import get_state_dir from mozbuild.base import MozbuildObject from mozpack.files import FileFinder from moztest.resolve import TestResolver from mozversioncontrol import get_repository_object from ..cli import BaseTryParser from ..tasks import generate_tasks, filter_tasks_by_paths, resolve_tests_by_suite from ..push import push_to_try, generate_try_task_config here = os.path.abspath(os.path.dirname(__file__)) build = MozbuildObject.from_environment(cwd=here) vcs = get_repository_object(build.topsrcdir) root_hash = hashlib.sha256(os.path.abspath(build.topsrcdir)).hexdigest() cache_dir = os.path.join(get_state_dir(), 'cache', root_hash, 'chunk_mapping') if not os.path.isdir(cache_dir): os.makedirs(cache_dir) CHUNK_MAPPING_FILE = os.path.join(cache_dir, 'chunk_mapping.sqlite') CHUNK_MAPPING_TAG_FILE = os.path.join(cache_dir, 'chunk_mapping_tag.json') # Maps from platform names in the chunk_mapping sqlite database to respective # substrings in task names. PLATFORM_MAP = { 'linux': 'test-linux64/opt', 'windows': 'test-windows10-64/opt', } # List of platform/build type combinations that are included in pushes by |mach try coverage|. OPT_TASK_PATTERNS = [ 'macosx64/opt',
def bootstrap(topsrcdir, mozilla_dir=None):
    """Create and return a configured mach driver rooted at *topsrcdir*.

    *mozilla_dir* defaults to *topsrcdir*; it is where mach command modules
    and the local machrc are looked up.
    """
    if mozilla_dir is None:
        mozilla_dir = topsrcdir

    # Ensure we are running Python 2.7 or 3.5+. We put this check here so we
    # generate a user-friendly error message rather than a cryptic stack trace
    # on module import.
    major, minor = sys.version_info[:2]
    if (major == 2 and minor < 7) or (major == 3 and minor < 5):
        print('Python 2.7 or Python 3.5+ is required to run mach.')
        print('You are running Python', platform.python_version())
        sys.exit(1)

    # Global build system and mach state is stored in a central directory. By
    # default, this is ~/.mozbuild. However, it can be defined via an
    # environment variable. We detect first run (by lack of this directory
    # existing) and notify the user that it will be created. The logic for
    # creation is much simpler for the "advanced" environment variable use
    # case. For default behavior, we educate users and give them an opportunity
    # to react. We always exit after creating the directory because users don't
    # like surprises.
    sys.path[0:0] = [
        os.path.join(mozilla_dir, path)
        for path in search_path(mozilla_dir,
                                'build/mach_virtualenv_packages.txt')
    ]
    import mach.base
    import mach.main
    from mach.util import setenv
    from mozboot.util import get_state_dir

    # Set a reasonable limit to the number of open files.
    #
    # Some linux systems set `ulimit -n` to a very high number, which works
    # well for systems that run servers, but this setting causes performance
    # problems when programs close file descriptors before forking, like
    # Python's `subprocess.Popen(..., close_fds=True)` (close_fds=True is the
    # default in Python 3), or Rust's stdlib. In some cases, Firefox does the
    # same thing when spawning processes. We would prefer to lower this limit
    # to avoid such performance problems; processes spawned by `mach` will
    # inherit the limit set here.
    #
    # The Firefox build defaults the soft limit to 1024, except for builds
    # that do LTO, where the soft limit is 8192. We're going to default to the
    # latter, since people do occasionally do LTO builds on their local
    # machines, and requiring them to discover another magical setting after
    # setting up an LTO build in the first place doesn't seem good.
    #
    # This code mimics the code in taskcluster/scripts/run-task.
    try:
        import resource
        # Keep the hard limit the same, though, allowing processes to change
        # their soft limit if they need to (Firefox does, for instance).
        (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
        # Permit people to override our default limit if necessary via
        # MOZ_LIMIT_NOFILE, which is the same variable `run-task` uses.
        limit = os.environ.get('MOZ_LIMIT_NOFILE')
        if limit:
            limit = int(limit)
        else:
            # If no explicit limit is given, use our default if it's less than
            # the current soft limit. For instance, the default on macOS is
            # 256, so we'd pick that rather than our default.
            limit = min(soft, 8192)
        # Now apply the limit, if it's different from the original one.
        if limit != soft:
            resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
    except ImportError:
        # The resource module is UNIX only.
        pass

    from mozbuild.util import patch_main
    patch_main()

    def resolve_repository():
        # Returns a repository object for the source checkout, or None when
        # no VCS (or no VCS tool) is detected.
        import mozversioncontrol

        try:
            # This API doesn't respect the vcs binary choices from configure.
            # If we ever need to use the VCS binary here, consider something
            # more robust.
            return mozversioncontrol.get_repository_object(path=mozilla_dir)
        except (mozversioncontrol.InvalidRepoPath,
                mozversioncontrol.MissingVCSTool):
            return None

    def pre_dispatch_handler(context, handler, args):
        # If --disable-tests flag was enabled in the mozconfig used to compile
        # the build, tests will be disabled. Instead of trying to run
        # nonexistent tests then reporting a failure, this will prevent mach
        # from progressing beyond this point.
        if handler.category == 'testing' and not handler.ok_if_tests_disabled:
            from mozbuild.base import BuildEnvironmentNotFoundException
            try:
                from mozbuild.base import MozbuildObject
                # all environments should have an instance of build object.
                build = MozbuildObject.from_environment()
                if build is not None and hasattr(build, 'mozconfig'):
                    ac_options = build.mozconfig['configure_args']
                    if ac_options and '--disable-tests' in ac_options:
                        print('Tests have been disabled by mozconfig with the flag ' +
                              '"ac_add_options --disable-tests".\n' +
                              'Remove the flag, and re-compile to enable tests.')
                        sys.exit(1)
            except BuildEnvironmentNotFoundException:
                # likely automation environment, so do nothing.
                pass

    def post_dispatch_handler(context, handler, instance, success,
                              start_time, end_time, depth, args):
        """Perform global operations after command dispatch.

        For now, we will use this to handle build system telemetry.
        """
        # Don't finalize telemetry data if this mach command was invoked as
        # part of another mach command.
        if depth != 1:
            return

        _finalize_telemetry_glean(context.telemetry,
                                  handler.name == 'bootstrap', success)
        _finalize_telemetry_legacy(context, instance, handler, success,
                                   start_time, end_time, topsrcdir)

    def populate_context(key=None):
        # Mach context handler: lazily resolve context attributes by key.
        # NOTE(review): unlike the older variants, this signature takes no
        # `context` argument — confirm it matches the mach.main API in use.
        if key is None:
            return

        if key == 'state_dir':
            state_dir = get_state_dir()
            if state_dir == os.environ.get('MOZBUILD_STATE_PATH'):
                if not os.path.exists(state_dir):
                    print('Creating global state directory from environment variable: %s'
                          % state_dir)
                    os.makedirs(state_dir, mode=0o770)
            else:
                if not os.path.exists(state_dir):
                    if not os.environ.get('MOZ_AUTOMATION'):
                        print(STATE_DIR_FIRST_RUN.format(userdir=state_dir))
                        try:
                            sys.stdin.readline()
                        except KeyboardInterrupt:
                            sys.exit(1)

                    print('\nCreating default state directory: %s' % state_dir)
                    os.makedirs(state_dir, mode=0o770)

            return state_dir

        if key == 'local_state_dir':
            return get_state_dir(srcdir=True)

        if key == 'topdir':
            return topsrcdir

        if key == 'pre_dispatch_handler':
            return pre_dispatch_handler

        if key == 'post_dispatch_handler':
            return post_dispatch_handler

        if key == 'repository':
            return resolve_repository()

        raise AttributeError(key)

    # Note which process is top-level so that recursive mach invocations can
    # avoid writing telemetry data.
    if 'MACH_MAIN_PID' not in os.environ:
        setenv('MACH_MAIN_PID', str(os.getpid()))

    driver = mach.main.Mach(os.getcwd())
    driver.populate_context_handler = populate_context

    if not driver.settings_paths:
        # default global machrc location
        driver.settings_paths.append(get_state_dir())

    # always load local repository configuration
    driver.settings_paths.append(mozilla_dir)

    for category, meta in CATEGORIES.items():
        driver.define_category(category, meta['short'], meta['long'],
                               meta['priority'])

    repo = resolve_repository()

    for path in MACH_MODULES:
        # Sparse checkouts may not have all mach_commands.py files. Ignore
        # errors from missing files.
        try:
            driver.load_commands_from_file(os.path.join(mozilla_dir, path))
        except mach.base.MissingFileError:
            if not repo or not repo.sparse_checkout_present():
                raise

    return driver