def docker_worker_hazard(config, job, taskdesc):
    """Set up a hazard-analysis build running on docker-worker.

    Adds build artifacts, the workspace volume, tooltool and secrets
    support, then defers to the run-task implementation.
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    worker.setdefault("artifacts", [])

    docker_worker_add_artifacts(config, job, taskdesc)
    worker.setdefault("required-volumes", []).append(
        "{workdir}/workspace".format(**run))
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    # Use setdefault so a job that declared no env doesn't KeyError, and to
    # stay consistent with the other docker-worker transforms in this file.
    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # script parameters
    if run.get("mozconfig"):
        env["MOZCONFIG"] = run.pop("mozconfig")

    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_spidermonkey(config, job, taskdesc):
    """Translate a spidermonkey-* job into a run-task docker-worker task."""
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    worker.setdefault('artifacts', [])

    docker_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run.pop('spidermonkey-variant'),
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')

    # Each `using` flavor maps onto its own build script; anything else
    # (i.e. plain spidermonkey) uses build-sm.sh.
    script = {
        'spidermonkey-package': 'build-sm-package.sh',
        'spidermonkey-mozjs-crate': 'build-sm-mozjs-crate.sh',
        'spidermonkey-rust-bindings': 'build-sm-rust-bindings.sh',
    }.get(run['using'], 'build-sm.sh')

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run['command'] = [
        './checkouts/gecko/taskcluster/scripts/builder/{script}'.format(
            script=script)
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def configure_gradlew(config, job, taskdesc):
    """Turn a gradlew job into a run-task job, pointing Gradle at the
    fetched Android SDK and granting any requested secret scopes."""
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    sdk_root = path.join(
        run["workdir"], worker["env"]["MOZ_FETCHES_DIR"], "android-sdk-linux")
    worker.setdefault("env", {}).update({"ANDROID_SDK_ROOT": sdk_root})

    # defer to the run_task implementation
    run["command"] = _extract_command(run)

    # Scopes must not have any duplicates, hence the set comprehension.
    secret_scopes = {
        "secrets:get:{}".format(secret["name"])
        for secret in run.pop("secrets", [])
    }
    taskdesc.setdefault("scopes", []).extend(list(secret_scopes))

    run["cwd"] = "{checkout}"
    run["using"] = "run-task"
    configure_taskdesc_for_run(config, job, taskdesc,
                               job["worker"]["implementation"])
def docker_worker_spidermonkey(config, job, taskdesc):
    """Translate a spidermonkey job into a run-task docker-worker task."""
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    worker.setdefault("artifacts", [])

    docker_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault("env", {})
    env.update({
        "MOZHARNESS_DISABLE": "true",
        "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })
    if "spidermonkey-platform" in run:
        env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")

    # The packaging flavor has a dedicated build script.
    script = ("build-sm-package.sh"
              if run["using"] == "spidermonkey-package" else "build-sm.sh")

    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    run["command"] = [
        "./checkouts/gecko/taskcluster/scripts/builder/{}".format(script)
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_hazard(config, job, taskdesc):
    """Set up a hazard-analysis build running on docker-worker.

    Adds build artifacts, the workspace volume, tooltool and secrets
    support, then defers to the run-task implementation.
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']

    # setdefault (rather than assignment) preserves any artifacts the job
    # already declared, matching the other docker-worker transforms here.
    worker.setdefault('artifacts', [])

    docker_worker_add_artifacts(config, job, taskdesc)
    worker.setdefault('required-volumes',
                      []).append('{workdir}/workspace'.format(**run))
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    # setdefault avoids a KeyError when the job declared no env.
    env = worker.setdefault('env', {})
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run.pop('mozconfig')

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def configure_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']

    # macOS 10.14 workers need an explicit UTF-8 locale for mach.
    env_prefix = []
    if job['worker-type'].endswith('1014'):
        env_prefix = ['LC_ALL=en_US.UTF-8', 'LANG=en_US.UTF-8']

    prefix = ' '.join(env_prefix + ['./mach '])

    mach = run.pop('mach')
    if isinstance(mach, dict):
        # A {task-reference/artifact-reference: pattern} mapping; keep the
        # wrapper key and prepend the prefix to the pattern.
        ref, pattern = next(iter(mach.items()))
        command = {ref: prefix + pattern}
    else:
        command = prefix + mach

    # defer to the run_task implementation
    run['command'] = command
    run['cwd'] = '{checkout}'
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def generic_worker_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on generic-worker.

    Wires artifacts, caching (digest-based task caching unless disabled),
    toolchain attributes and the final run-task command.
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not artifacts:
        generic_worker_add_artifacts(config, job, taskdesc)

    if job["worker"]["os"] == "windows":
        # There were no caches on generic-worker before bug 1519472, and they cause
        # all sorts of problems with Windows toolchain tasks, disable them until
        # tasks are ready.
        run["use-caches"] = False

    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run["script"].endswith(".py"):
        raise NotImplementedError(
            "Python toolchain scripts aren't supported on generic-worker")

    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    # Compute the digest before `script`/`arguments` are popped below, so the
    # cache key reflects the full run description.
    digest_data = get_digest_data(config, run, taskdesc)

    # Cache the task unless explicitly opted out or running in fast mode.
    if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": digest_data,
        }

    run["using"] = "run-task"

    args = run.pop("arguments", "")
    if args:
        args = " " + shell_quote(*args)

    # Pick the environment-variable syntax matching the worker's shell.
    if job["worker"]["os"] == "windows":
        gecko_path = "%GECKO_PATH%"
    else:
        gecko_path = "$GECKO_PATH"

    run["command"] = "{}/taskcluster/scripts/misc/{}{}".format(
        gecko_path, run.pop("script"), args)

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']

    # defer to the run_task implementation
    run['command'] = 'cd {}/checkouts/gecko && ./mach {}'.format(
        run['workdir'], run.pop('mach'))
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def docker_worker_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']

    # defer to the run_task implementation
    run['command'] = 'cd /builds/worker/checkouts/gecko && ./mach {}'.format(
        run.pop('mach'))
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def windows_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on a Windows generic-worker."""
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]

    # Allow the job to specify where artifacts come from.
    worker.setdefault(
        "artifacts",
        [{
            "path": r"public\build",
            "type": "directory",
        }],
    )

    worker["chain-of-trust"] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks, disable them until
    # tasks are ready.
    run["use-caches"] = False

    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run["script"].endswith(".py"):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get("arguments", "")
    if args:
        args = " " + shell_quote(*args)

    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    # Cache toolchain tasks by a digest of the run description, except when
    # generating a fast (uncached) graph.
    if not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": get_digest_data(config, run, taskdesc),
        }

    bash = r"c:\mozilla-build\msys\bin\bash"

    run["using"] = "run-task"
    run["command"] = [
        # do something intelligent.
        r"{} build/src/taskcluster/scripts/misc/{}{}".format(
            bash, run.pop("script"), args)
    ]
    # `arguments` were already folded into the command string above.
    run.pop("arguments", None)

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def configure_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']

    # defer to the run_task implementation
    run['command'] = 'cd $GECKO_PATH && ./mach ' + run.pop('mach')
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def configure_python_test(config, job, taskdesc):
    """Rewrite a python-test job so the mach transform handles it."""
    run = job['run']
    worker = job['worker']

    # defer to the mach implementation
    run['mach'] = 'python-test --subsuite {}'.format(run.pop('subsuite'))
    run['using'] = 'mach'
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def configure_python_test(config, job, taskdesc):
    """Rewrite a python-test job so the mach transform handles it."""
    run = job["run"]
    worker = job["worker"]

    # defer to the mach implementation
    run["mach"] = "python-test --subsuite {} --run-slow".format(
        run.pop("subsuite"))
    run["using"] = "mach"
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']
    mach_args = run.pop('mach')

    # defer to the run_task implementation
    run['command'] = ('cd /builds/worker/checkouts/gecko && ./mach '
                      + mach_args)
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def windows_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on a Windows generic-worker."""
    run = job['run']
    worker = taskdesc['worker'] = job['worker']

    # Allow the job to specify where artifacts come from.
    worker.setdefault('artifacts', [{
        'path': r'public\build',
        'type': 'directory',
    }])

    worker['chain-of-trust'] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks, disable them until
    # tasks are ready.
    run['use-caches'] = False

    env = worker.setdefault('env', {})
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run.pop('toolchain-artifact')
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run.pop('toolchain-alias')

    # Cache toolchain tasks by a digest of the run description, except when
    # generating a fast (uncached) graph.
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }

    bash = r'c:\mozilla-build\msys\bin\bash'

    run['using'] = 'run-task'
    run['command'] = [
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run.pop('script'), args)
    ]
    # `arguments` were already folded into the command string above.
    run.pop('arguments', None)

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def docker_worker_python_test(config, job, taskdesc):
    """Rewrite a python-test job into a run-task shell command."""
    run = job['run']
    python = run.pop('python-version')
    subsuite = run.pop('subsuite')

    # defer to the run_task implementation
    run['command'] = (
        'cd {}/checkouts/gecko && '
        './mach python-test --python {} --subsuite {}'.format(
            run['workdir'], python, subsuite))
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def docker_worker_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on docker-worker."""
    run = job['run']

    worker = taskdesc['worker'] = job['worker']
    worker['chain-of-trust'] = True

    # If the task doesn't have a docker-image, set a default
    worker.setdefault('docker-image', {'in-tree': 'toolchain-build'})

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(
            artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)

    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'GECKO_PATH': gecko_path,
    })

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run.pop('toolchain-artifact')
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run.pop('toolchain-alias')

    # Cache toolchain tasks by a digest of the run description, except when
    # generating a fast (uncached) graph.
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = [path.join(gecko_path, 'mach'), 'python']
    else:
        wrapper = []

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run["command"] = (wrapper + [
        "workspace/build/src/taskcluster/scripts/misc/{}".format(
            run.pop("script"))
    ] + run.pop("arguments", []))

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def configure_gradlew(config, job, taskdesc):
    """Turn a gradlew job into a run-task job with the Android SDK wired up."""
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    # Gradle needs to know where the Android SDK lives.
    env = worker.setdefault("env", {})
    env["ANDROID_SDK_ROOT"] = path.join(run["workdir"], "android-sdk-linux")

    run["command"] = _extract_gradlew_command(run)
    _inject_secrets_scopes(run, taskdesc)
    _set_run_task_attributes(job)
    configure_taskdesc_for_run(config, job, taskdesc,
                               job["worker"]["implementation"])
def configure_run_commands_schema(config, job, taskdesc):
    """Turn a run-commands job into a run-task job, prepending the commands
    that fetch any requested secrets."""
    run = job["run"]

    # Secrets are fetched before the job's own commands run.
    commands = [
        _generate_secret_command(secret) for secret in run.get("secrets", [])
    ]
    commands.extend(run.pop("commands", []))
    run["command"] = _convert_commands_to_string(commands)

    _inject_secrets_scopes(run, taskdesc)
    _set_run_task_attributes(job)
    configure_taskdesc_for_run(config, job, taskdesc,
                               job["worker"]["implementation"])
def docker_worker_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on docker-worker."""
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True

    # If the task doesn't have a docker-image, set a default
    worker.setdefault("docker-image", {"in-tree": "deb10-toolchain-build"})

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not artifacts:
        docker_worker_add_artifacts(config, job, taskdesc)

    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = "{workdir}/workspace/build".format(**run)
    gecko_path = "{}/src".format(workspace)

    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
        "GECKO_PATH": gecko_path,
    })

    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    # Compute the digest before `script`/`arguments` are popped below, so the
    # cache key reflects the full run description.
    digest_data = get_digest_data(config, run, taskdesc)

    # Cache the task unless explicitly opted out or running in fast mode.
    if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": digest_data,
        }

    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    run["command"] = [
        "workspace/build/src/taskcluster/scripts/misc/{}".format(
            run.pop("script"))
    ] + run.pop("arguments", [])

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def configure_python_test(config, job, taskdesc):
    """Rewrite a python-test job so the mach transform handles it."""
    run = job['run']
    worker = job['worker']

    if worker['os'] == 'macosx' and run['python-version'] == 3:
        # OSX hosts can't seem to find python 3 on their own
        run['python-version'] = '/usr/local/bin/python3'

    python = run.pop('python-version')
    subsuite = run.pop('subsuite')

    # defer to the mach implementation
    run['mach'] = 'python-test --python {} --subsuite {}'.format(
        python, subsuite)
    run['using'] = 'mach'
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def generic_worker_spidermonkey(config, job, taskdesc):
    """Set up a spidermonkey build running on generic-worker (Windows only).

    Flavors other than plain `spidermonkey` are recognized but deliberately
    rejected below until they have been tested on this worker.
    """
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    worker = taskdesc['worker'] = job['worker']

    generic_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run.pop('spidermonkey-variant'),
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'SCCACHE_DISABLE': "1",
        'WORK': ".",  # Override the defaults in build scripts
        'GECKO_PATH': "./src",  # with values suiteable for windows generic worker
        'UPLOAD_DIR': "./public/build"
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-package is not a supported configuration")
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-mozjs-crate is not a supported configuration")
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-rust-bindings is not a supported configuration")

    run['using'] = 'run-task'
    run['command'] = [
        'c:\\mozilla-build\\msys\\bin\\bash.exe '  # string concat
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def generic_worker_spidermonkey(config, job, taskdesc):
    """Set up a spidermonkey build running on generic-worker (Windows only).

    Flavors other than plain `spidermonkey` are recognized but deliberately
    rejected below until they have been tested on this worker.
    """
    assert job["worker"]["os"] == "windows", "only supports windows right now"

    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]

    generic_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault("env", {})
    env.update({
        "MOZHARNESS_DISABLE": "true",
        "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
        "SCCACHE_DISABLE": "1",
        "WORK": ".",  # Override the defaults in build scripts
        "GECKO_PATH": "./src",  # with values suiteable for windows generic worker
        "UPLOAD_DIR": "./public/build",
    })
    if "spidermonkey-platform" in run:
        env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")

    script = "build-sm.sh"
    if run["using"] == "spidermonkey-package":
        script = "build-sm-package.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-package is not a supported configuration")
    elif run["using"] == "spidermonkey-mozjs-crate":
        script = "build-sm-mozjs-crate.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-mozjs-crate is not a supported configuration")
    elif run["using"] == "spidermonkey-rust-bindings":
        script = "build-sm-rust-bindings.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-rust-bindings is not a supported configuration")

    run["using"] = "run-task"
    run["command"] = [
        "c:\\mozilla-build\\msys\\bin\\bash.exe "  # string concat
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def configure_gradlew(config, job, taskdesc):
    """Turn a gradlew job into a run-task job."""
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    # TODO: to uncomment later when we'll port over logic from bug 1622339
    # worker.setdefault("env", {}).update({
    #     "ANDROID_SDK_ROOT": path.join(
    #         run["workdir"], worker["env"]["MOZ_FETCHES_DIR"], "android-sdk-linux"
    #     )
    # })

    run["command"] = _extract_gradlew_command(run)

    _inject_secrets_scopes(run, taskdesc)
    _set_run_task_attributes(job)

    configure_taskdesc_for_run(config, job, taskdesc,
                               job["worker"]["implementation"])
def docker_worker_spidermonkey(config, job, taskdesc):
    """Set up a spidermonkey build running on docker-worker, with a
    persistent workspace cache and tooltool support."""
    run = job['run']

    worker = taskdesc['worker'] = job['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name':
        '{}-build-spidermonkey-workspace'.format(config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
        'skip-untrusted': True,
    })

    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run.pop('spidermonkey-variant'),
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'GECKO_PATH': '{}/workspace/build/src'.format(run['workdir'])
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')

    # Each `using` flavor has a dedicated build script; plain spidermonkey
    # uses build-sm.sh.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run['command'] = [
        'workspace/build/src/taskcluster/scripts/builder/{script}'.format(
            script=script)
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def configure_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation."""
    run = job['run']
    prefix = 'cd $GECKO_PATH && ./mach '

    mach = run.pop('mach')
    if isinstance(mach, dict):
        # A {task-reference/artifact-reference: pattern} mapping; keep the
        # wrapper key and prepend the prefix to the pattern.
        ref, pattern = next(iter(mach.items()))
        command = {ref: prefix + pattern}
    else:
        command = prefix + mach

    # defer to the run_task implementation
    run['command'] = command
    run['using'] = 'run-task'
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def configure_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation, resolving
    the python interpreter and locale prefix for the target worker."""
    run = job['run']
    worker = job['worker']

    additional_prefix = []
    # macOS 10.14 workers need an explicit UTF-8 locale.
    if job['worker-type'].endswith('1014'):
        additional_prefix = [
            'LC_ALL=en_US.UTF-8', 'LANG=en_US.UTF-8'
        ]

    python = run.get('python-version')
    if python:
        del run['python-version']

        if worker['os'] == 'macosx' and python == 3:
            # OSX hosts can't seem to find python 3 on their own
            python = '/tools/python37/bin/python3.7'
            if job['worker-type'].endswith('1014'):
                python = '/usr/local/bin/python3'

        python = str(python)
        # A bare version number (e.g. "3.7") becomes "python3.7"; anything
        # that doesn't parse as a float (a path or command) is used verbatim.
        try:
            float(python)
            python = "python" + python
        except ValueError:
            pass

        additional_prefix.append(python)

    command_prefix = ' '.join(additional_prefix + ['./mach '])

    mach = run['mach']
    if isinstance(mach, dict):
        # A {task-reference/artifact-reference: pattern} mapping; keep the
        # wrapper key and prepend the prefix to the pattern.
        ref, pattern = next(iter(mach.items()))
        command = {ref: command_prefix + pattern}
    else:
        command = command_prefix + mach

    # defer to the run_task implementation
    run['command'] = command
    run['cwd'] = '{checkout}'
    run['using'] = 'run-task'
    del run['mach']
    configure_taskdesc_for_run(config, job, taskdesc,
                               job['worker']['implementation'])
def docker_worker_toolchain(config, job, taskdesc):
    """Set up a toolchain build running on docker-worker."""
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True

    srcdir = get_vcsdir_name(worker["os"])

    # If the task doesn't have a docker-image, set a default
    worker.setdefault("docker-image", {"in-tree": "toolchain-build"})

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not any(
            artifact.get("name") == "public/build" for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)

    env = worker["env"]
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    # Cache toolchain tasks by a digest of the run description, except when
    # generating a fast (uncached) graph.
    if not taskgraph.fast:
        name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": get_digest_data(config, run, taskdesc),
        }

    run["using"] = "run-task"
    run["cwd"] = "{checkout}/.."
    run["command"] = [
        "{}/taskcluster/scripts/toolchain/{}".format(srcdir,
                                                     run.pop("script"))
    ] + run.pop("arguments", [])

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def configure_mach(config, job, taskdesc):
    """Defer a mach invocation to the run-task implementation, resolving
    the python interpreter and locale prefix for the target worker."""
    run = job["run"]
    worker = job["worker"]

    prefix_parts = []
    if worker["os"] == "macosx":
        # macOS workers need an explicit UTF-8 locale.
        prefix_parts = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"]

    python = run.get("python-version")
    if python:
        del run["python-version"]

        if worker["os"] == "macosx" and python == 3:
            # macOS hosts can't find python 3 on their own.
            python = "/usr/local/bin/python3"

        python = str(python)
        try:
            float(python)
        except ValueError:
            # Not a bare version number (a path or command); use verbatim.
            pass
        else:
            # A bare version number such as "3.7" becomes "python3.7".
            python = "python" + python

        prefix_parts.append(python)

    command_prefix = " ".join(prefix_parts + ["./mach "])

    mach = run.pop("mach")
    if isinstance(mach, dict):
        # A {task-reference/artifact-reference: pattern} mapping; keep the
        # wrapper key and prepend the prefix to the pattern.
        ref, pattern = next(iter(mach.items()))
        command = {ref: command_prefix + pattern}
    else:
        command = command_prefix + mach

    # defer to the run_task implementation
    run["command"] = command
    run["cwd"] = "{checkout}"
    run["using"] = "run-task"
    configure_taskdesc_for_run(config, job, taskdesc,
                               job["worker"]["implementation"])
def mozharness_on_generic_worker(config, job, taskdesc):
    """Set up a mozharness build running on a Windows generic-worker.

    Validates run options, configures secrets/artifacts/env, builds the
    mozharness command line, and hands off to run-task. After that hand-off
    it also sets PYTHONPATH and (optionally) prepends sccache symlink setup
    to the worker command.
    """
    assert (job["worker"]["os"] == "windows"
            ), "only supports windows right now: {}".format(job["label"])

    run = job['run']

    # fail if invalid run options are included
    invalid = []
    for prop in ['need-xvfb']:
        if prop in run and run.pop(prop):
            invalid.append(prop)
    if not run.pop('keep-artifacts', True):
        invalid.append('keep-artifacts')
    if invalid:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties "
            + ', '.join(invalid))

    worker = taskdesc['worker'] = job['worker']

    worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None)

    setup_secrets(config, job, taskdesc)

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': 'logs',
        'type': 'directory'
    })
    if not worker.get('skip-artifacts', False):
        generic_worker_add_artifacts(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
        'MOZ_SOURCE_REPO': get_branch_repo(config),
    })
    if run.pop('use-simple-package'):
        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})

    extra_config = run.pop('extra-config', {})
    extra_config['objdir'] = 'obj-build'
    env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(json.dumps(extra_config))

    # The windows generic worker uses batch files to pass environment variables
    # to commands. Setting a variable to empty in a batch file unsets, so if
    # there is no `TRY_COMMIT_MESSAGE`, pass a space instead, so that
    # mozharness doesn't try to find the commit message on its own.
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [
        'c:/mozilla-build/python/python.exe',
        '%GECKO_PATH%/testing/{}'.format(run.pop('script')),
    ]

    for path in run.pop('config-paths', []):
        mh_command.append('--extra-config-path %GECKO_PATH%/{}'.format(path))

    for cfg in run.pop('config'):
        mh_command.append('--config ' + cfg)
    if run.pop('use-magic-mh-args'):
        mh_command.append('--branch ' + config.params['project'])
    # Remap the Z: task drive to z: so mozharness sees consistent paths.
    mh_command.append(r'--work-dir %cd:Z:=z:%\workspace')
    for action in run.pop('actions', []):
        mh_command.append('--' + action)

    for option in run.pop('options', []):
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command.append('--custom-build-variant')
        mh_command.append(run.pop('custom-build-variant-cfg'))

    run['using'] = 'run-task'
    run['command'] = mh_command
    # These keys were consumed (or are unsupported) on this worker.
    run.pop('secrets')
    run.pop('requires-signed-builds')
    run.pop('job-script', None)

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])

    # TODO We should run the mozharness script with `mach python` so these
    # modules are automatically available, but doing so somehow caused hangs in
    # Windows ccov builds (see bug 1543149).
    mozbase_dir = "{}/testing/mozbase".format(env['GECKO_PATH'])
    env['PYTHONPATH'] = ';'.join([
        "{}/manifestparser".format(mozbase_dir),
        "{}/mozinfo".format(mozbase_dir),
        "{}/mozfile".format(mozbase_dir),
        "{}/mozprocess".format(mozbase_dir),
        "{}/third_party/python/six".format(env['GECKO_PATH']),
    ])

    if taskdesc.get('needs-sccache'):
        worker['command'] = [
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ] + worker['command']
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Set up a mozharness build running on docker-worker.

    Translates the mozharness run description into environment variables
    consumed by the in-image build script, then defers to run-task.
    """
    run = job['run']

    worker = taskdesc['worker'] = job['worker']

    if not run.pop('use-simple-package', None):
        raise NotImplementedError("Simple packaging cannot be disabled via"
                                  "'use-simple-package' on docker-workers")
    if not run.pop('use-magic-mh-args', None):
        raise NotImplementedError("Cannot disabled mh magic arg passing via"
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian8-amd64-build, but it could be another image (like
    # android-build).
    worker.setdefault('docker-image', {'in-tree': 'debian8-amd64-build'})

    worker.setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None)
    docker_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'WORKSPACE': '{workdir}/workspace'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run.pop('config')),
        'MOZHARNESS_SCRIPT': run.pop('script'),
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
        'MOZ_SOURCE_REPO': get_branch_repo(config),
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'PYTHONUNBUFFERED': '1',
    })

    worker.setdefault('required-volumes', []).append(env['WORKSPACE'])

    # Optional run keys become space-joined env variables for the script.
    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run.pop('actions'))

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run.pop('options'))

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run.pop('config-paths'))

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run.pop(
            'custom-build-variant-cfg')

    extra_config = run.pop('extra-config', {})
    extra_config['objdir'] = 'obj-build'
    env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(json.dumps(extra_config))

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run.pop('keep-artifacts'):
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run.pop('need-xvfb'):
        env['NEED_XVFB'] = 'true'
    else:
        env['NEED_XVFB'] = 'false'

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]

    setup_secrets(config, job, taskdesc)

    run['using'] = 'run-task'
    run['command'] = mozpath.join(
        "${GECKO_PATH}",
        run.pop('job-script', 'taskcluster/scripts/builder/build-linux.sh'),
    )
    # These keys were consumed above (or are handled by setup_secrets).
    run.pop('secrets')
    run.pop('requires-signed-builds')

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])