def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a spidermonkey build to run via run-task on docker-worker.

    config: the kind's transform config (provides ``.params``)
    job: the job description being transformed (``run`` is mutated in place)
    taskdesc: the task description being built (``worker`` is installed here)
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    worker.setdefault('artifacts', [])
    docker_worker_add_artifacts(config, job, taskdesc)

    task_env = worker.setdefault('env', {})
    task_env['MOZHARNESS_DISABLE'] = 'true'
    # `spidermonkey-variant` is consumed here; it must not leak through to
    # the run-task schema validation below.
    task_env['SPIDERMONKEY_VARIANT'] = run.pop('spidermonkey-variant')
    task_env['MOZ_BUILD_DATE'] = config.params['moz_build_date']
    task_env['MOZ_SCM_LEVEL'] = config.params['level']
    if 'spidermonkey-platform' in run:
        task_env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')

    # Each `run.using` flavour maps to its own build script; plain
    # `spidermonkey` falls back to build-sm.sh.
    flavour_scripts = {
        'spidermonkey-package': "build-sm-package.sh",
        'spidermonkey-mozjs-crate': "build-sm-mozjs-crate.sh",
        'spidermonkey-rust-bindings': "build-sm-rust-bindings.sh",
    }
    script = flavour_scripts.get(run['using'], "build-sm.sh")

    # Hand the job over to the generic run-task transform.
    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run['command'] = [
        './checkouts/gecko/taskcluster/scripts/builder/{script}'.format(
            script=script)
    ]
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def docker_worker_hazard(config, job, taskdesc):
    """Configure a hazard-analysis build to run via run-task on docker-worker.

    config: the kind's transform config (provides ``.params``)
    job: the job description being transformed (``run`` is mutated in place)
    taskdesc: the task description being built (``worker`` is installed here)
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    worker['artifacts'] = []
    docker_worker_add_artifacts(config, job, taskdesc)
    # The hazard analysis writes into {workdir}/workspace; declare it as a
    # volume so the worker mounts it.
    worker.setdefault('required-volumes', []).append('{workdir}/workspace'.format(**run))
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    # script parameters
    if run.get('mozconfig'):
        # Consume the key so it does not leak into run-task validation.
        env['MOZCONFIG'] = run.pop('mozconfig')
    # Hand the job over to the generic run-task transform.
    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def docker_worker_hazard(config, job, taskdesc):
    """Configure a hazard-analysis build on docker-worker (legacy variant).

    Unlike the run-task variant, this builds the worker command directly,
    wrapping the job's shell command in run-task with a gecko checkout.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']
    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '{workdir}/checkouts/gecko'.format(**run)
    # Wrap the job's shell command in run-task so the checkout is prepared
    # before the analysis runs.
    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', '{workdir}/checkouts/gecko'.format(**run),
        '--',
        '/bin/bash', '-c', run['command']
    ]
def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a spidermonkey build to run via run-task on docker-worker.

    config: the kind's transform config (provides ``.params``)
    job: the job description being transformed (``run`` is mutated in place)
    taskdesc: the task description being built (``worker`` is installed here)
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    worker.setdefault("artifacts", [])
    docker_worker_add_artifacts(config, job, taskdesc)

    build_env = worker.setdefault("env", {})
    build_env["MOZHARNESS_DISABLE"] = "true"
    # Consume run keys here so they do not leak into run-task validation.
    build_env["SPIDERMONKEY_VARIANT"] = run.pop("spidermonkey-variant")
    build_env["MOZ_BUILD_DATE"] = config.params["moz_build_date"]
    build_env["MOZ_SCM_LEVEL"] = config.params["level"]
    if "spidermonkey-platform" in run:
        build_env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")

    # The packaging flavour uses its own build script; everything else
    # runs the plain spidermonkey build.
    script = (
        "build-sm-package.sh"
        if run["using"] == "spidermonkey-package"
        else "build-sm.sh"
    )

    # Hand the job over to the generic run-task transform.
    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    run["command"] = [
        "./checkouts/gecko/taskcluster/scripts/builder/{script}".format(
            script=script)
    ]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_hazard(config, job, taskdesc):
    """Configure a hazard-analysis build to run via run-task on docker-worker.

    config: the kind's transform config (provides ``.params``)
    job: the job description being transformed (``run`` is mutated in place)
    taskdesc: the task description being built (``worker`` is installed here)
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]

    worker.setdefault("artifacts", [])
    docker_worker_add_artifacts(config, job, taskdesc)

    # The analysis writes into {workdir}/workspace; declare it as a volume.
    volumes = worker.setdefault("required-volumes", [])
    volumes.append("{workdir}/workspace".format(**run))

    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    task_env = worker["env"]
    task_env["MOZ_BUILD_DATE"] = config.params["moz_build_date"]
    task_env["MOZ_SCM_LEVEL"] = config.params["level"]

    # Optional script parameter; consume the key so it does not leak into
    # run-task validation.
    if run.get("mozconfig"):
        task_env["MOZCONFIG"] = run.pop("mozconfig")

    # Hand the job over to the generic run-task transform.
    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build to run via run-task on docker-worker.

    Sets toolchain attributes, registers the task in the toolchain cache
    (unless running with taskgraph.fast), and builds the run-task command.
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    worker['chain-of-trust'] = True
    # If the task doesn't have a docker-image, set a default
    worker.setdefault('docker-image', {'in-tree': 'toolchain-build'})
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(
            artifact.get('name') == 'public/build'
            for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)
    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'GECKO_PATH': gecko_path,
    })
    attributes = taskdesc.setdefault('attributes', {})
    # Consume toolchain keys so they do not leak into run-task validation.
    attributes['toolchain-artifact'] = run.pop('toolchain-artifact')
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run.pop('toolchain-alias')
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        # Digest is computed before `script`/`arguments` are popped below,
        # so they still participate in the cache key.
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = [path.join(gecko_path, 'mach'), 'python']
    else:
        wrapper = []
    # Hand the job over to the generic run-task transform.
    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run["command"] = (wrapper + [
        "workspace/build/src/taskcluster/scripts/misc/{}".format(
            run.pop("script"))
    ] + run.pop("arguments", []))
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build to run via run-task on docker-worker.

    Variant using the deb10 toolchain image; supports opting out of task
    caching via the job's `cached_task: false` attribute.
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True
    # If the task doesn't have a docker-image, set a default
    worker.setdefault("docker-image", {"in-tree": "deb10-toolchain-build"})
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    # NOTE(review): the check below only tests for *any* artifacts, not
    # specifically public/build — comment may be stale; confirm intent.
    artifacts = worker.setdefault("artifacts", [])
    if not artifacts:
        docker_worker_add_artifacts(config, job, taskdesc)
    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = "{workdir}/workspace/build".format(**run)
    gecko_path = "{}/src".format(workspace)
    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
        "GECKO_PATH": gecko_path,
    })
    attributes = taskdesc.setdefault("attributes", {})
    # Consume toolchain keys so they do not leak into run-task validation.
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")
    # Compute the digest before `script`/`arguments` are popped below so
    # they still participate in the cache key.
    digest_data = get_digest_data(config, run, taskdesc)
    # Jobs may opt out of caching with `cached_task: false`.
    if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": digest_data,
        }
    # Hand the job over to the generic run-task transform.
    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    run["command"] = [
        "workspace/build/src/taskcluster/scripts/misc/{}".format(
            run.pop("script"))
    ] + run.pop("arguments", [])
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a spidermonkey build on docker-worker (legacy variant).

    Builds the worker command directly (run-task wrapping a build script)
    and mounts a persistent workspace cache.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    # Persistent workspace cache, skipped on untrusted (try-like) trees.
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': '{}-build-spidermonkey-workspace'.format(
            config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
        'skip-untrusted': True,
    })
    docker_worker_add_artifacts(config, job, taskdesc)
    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run['spidermonkey-platform']
    support_vcs_checkout(config, job, taskdesc)
    # Pick the build script matching the `run.using` flavour; plain
    # `spidermonkey` falls through to build-sm.sh.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)
    # run-task prepares the gecko checkout, then the build script runs
    # relative to the workdir.
    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', '{workdir}/workspace/build/src'.format(**run),
        '--',
        '/bin/bash',
        '-c',
        'cd {workdir} && workspace/build/src/taskcluster/scripts/builder/{script}'
        .format(workdir=run['workdir'], script=script)
    ]
def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a spidermonkey build to run via run-task on docker-worker.

    Variant with a persistent workspace cache, tooltool support, and
    GECKO_PATH pointing into the workspace checkout.
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    worker['artifacts'] = []
    # Persistent workspace cache, skipped on untrusted (try-like) trees.
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': '{}-build-spidermonkey-workspace'.format(
            config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
        'skip-untrusted': True,
    })
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)
    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        # Consumed here so it does not leak into run-task validation.
        'SPIDERMONKEY_VARIANT': run.pop('spidermonkey-variant'),
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'GECKO_PATH': '{}/workspace/build/src'.format(run['workdir'])
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')
    # Pick the build script matching the `run.using` flavour; plain
    # `spidermonkey` falls through to build-sm.sh.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
    # Hand the job over to the generic run-task transform.
    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run['command'] = [
        'workspace/build/src/taskcluster/scripts/builder/{script}'.format(
            script=script)
    ]
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a spidermonkey build on docker-worker (oldest variant).

    Uses hard-coded /builds/worker paths and builds the worker command
    directly rather than delegating to run-task configuration.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    # Persistent, level-scoped workspace cache; skipped on untrusted trees.
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
        'skip-untrusted': True,
    })
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)
    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    support_vcs_checkout(config, job, taskdesc)
    # Pick the build script matching the `run.using` flavour; plain
    # `spidermonkey` falls through to build-sm.sh.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
    # run-task prepares the checkout; the build script then runs relative
    # to /builds/worker.
    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--',
        '/bin/bash', '-c',
        'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build to run via run-task on docker-worker.

    Modern (f-string) variant; locates the source dir via the worker OS and
    runs the script from taskcluster/scripts/toolchain/.
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True
    # Checkout directory name depends on the worker's OS.
    srcdir = get_vcsdir_name(worker["os"])
    # If the task doesn't have a docker-image, set a default
    worker.setdefault("docker-image", {"in-tree": "toolchain-build"})
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not any(
            artifact.get("name") == "public/build"
            for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker["env"]
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })
    attributes = taskdesc.setdefault("attributes", {})
    # Consume toolchain keys so they do not leak into run-task validation.
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")
    if not taskgraph.fast:
        name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
        # Digest is computed before `script`/`arguments` are popped below,
        # so they still participate in the cache key.
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": get_digest_data(config, run, taskdesc),
        }
    # Hand the job over to the generic run-task transform.
    run["using"] = "run-task"
    # "{checkout}" is left literal here — presumably substituted later by
    # the run-task machinery; confirm in configure_taskdesc_for_run.
    run["cwd"] = "{checkout}/.."
    run["command"] = [
        "{}/taskcluster/scripts/toolchain/{}".format(srcdir, run.pop("script"))
    ] + run.pop("arguments", [])
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure a mozharness build to run via run-task on docker-worker.

    Translates the mozharness run description into environment variables
    consumed by build-linux.sh, then delegates to the run-task transform.

    Raises NotImplementedError if the job tries to disable simple
    packaging or magic mozharness arguments (unsupported on docker-worker).
    """
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    if not run.pop('use-simple-package', None):
        # Fixed message: implicit string concatenation was missing a space
        # ("...via'use-simple-package'...").
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run.pop('use-magic-mh-args', None):
        # Fixed message: grammar ("Cannot disabled") and missing space.
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")
    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian8-amd64-build, but it could be another image (like
    # android-build).
    worker.setdefault('docker-image', {'in-tree': 'debian8-amd64-build'})
    worker.setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None)
    docker_worker_add_artifacts(config, job, taskdesc)
    env = worker.setdefault('env', {})
    env.update({
        'WORKSPACE': '{workdir}/workspace'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run.pop('config')),
        'MOZHARNESS_SCRIPT': run.pop('script'),
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
        'MOZ_SOURCE_REPO': get_branch_repo(config),
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'PYTHONUNBUFFERED': '1',
    })
    worker.setdefault('required-volumes', []).append(env['WORKSPACE'])
    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run.pop('actions'))
    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run.pop('options'))
    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run.pop('config-paths'))
    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run.pop(
            'custom-build-variant-cfg')
    extra_config = run.pop('extra-config', {})
    extra_config['objdir'] = 'obj-build'
    env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(json.dumps(extra_config))
    if 'job-script' in run:
        # Only read here; the key is popped (with default) below when
        # building the command.
        env['JOB_SCRIPT'] = run['job-script']
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']
    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run.pop('keep-artifacts'):
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''
    # Xvfb
    if run.pop('need-xvfb'):
        env['NEED_XVFB'] = 'true'
    else:
        env['NEED_XVFB'] = 'false'
    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]
    setup_secrets(config, job, taskdesc)
    # Hand the job over to the generic run-task transform.
    run['using'] = 'run-task'
    run['command'] = mozpath.join(
        "${GECKO_PATH}",
        run.pop('job-script', 'taskcluster/scripts/builder/build-linux.sh'),
    )
    # Consume remaining mozharness-only keys so run-task validation passes.
    run.pop('secrets')
    run.pop('requires-signed-builds')
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure a mozharness build on docker-worker (legacy variant).

    Builds the worker command directly (run-task wrapping the job script)
    instead of delegating to the run-task transform.

    Raises NotImplementedError if the job tries to disable simple
    packaging or magic mozharness arguments (unsupported on docker-worker).
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']
    if not run['use-simple-package']:
        # Fixed message: implicit string concatenation was missing a space
        # ("...via'use-simple-package'...").
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        # Fixed message: grammar ("Cannot disabled") and missing space.
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")
    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image',
                                  {'in-tree': 'debian7-amd64-build'})
    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(
        config, job, taskdesc,
        extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)
    env = worker.setdefault('env', {})
    env.update({
        'GECKO_PATH': '{workdir}/workspace/build/src'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        # GECKO_HEAD_REV is expected in env already (set by
        # support_vcs_checkout above).
        'MOZ_SOURCE_CHANGESET': env['GECKO_HEAD_REV'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'PYTHONUNBUFFERED': '1',
    })
    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])
    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']
    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])
    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']
    if run['comm-checkout']:
        # comm-central builds report the comm revision as the changeset.
        env['MOZ_SOURCE_CHANGESET'] = env['COMM_HEAD_REV']
    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''
    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'
    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]
    docker_worker_setup_secrets(config, job, taskdesc)
    # run-task prepares the checkout(s), then runs the job script.
    command = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', env['GECKO_PATH'],
    ]
    if run['comm-checkout']:
        command.append(
            '--comm-checkout={workdir}/workspace/build/src/comm'.format(**run))
    command += [
        '--',
        '{workdir}/workspace/build/src/{script}'.format(
            workdir=run['workdir'],
            script=run.get('job-script',
                           'taskcluster/scripts/builder/build-linux.sh'),
        ),
    ]
    worker['command'] = command
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure a mozharness build to run via run-task on docker-worker.

    Translates the mozharness run description into environment variables
    consumed by build-linux.sh, then delegates to the run-task transform.

    Raises NotImplementedError if the job tries to disable simple
    packaging or magic mozharness arguments (unsupported on docker-worker).
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    if not run.pop("use-simple-package", None):
        # Fixed message: implicit string concatenation was missing a space
        # ("...via'use-simple-package'...").
        raise NotImplementedError(
            "Simple packaging cannot be disabled via "
            "'use-simple-package' on docker-workers"
        )
    if not run.pop("use-magic-mh-args", None):
        # Fixed message: grammar ("Cannot disabled") and missing space.
        raise NotImplementedError(
            "Cannot disable mh magic arg passing via "
            "'use-magic-mh-args' on docker-workers"
        )
    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian8-amd64-build, but it could be another image (like
    # android-build).
    worker.setdefault("docker-image", {"in-tree": "debian8-amd64-build"})
    worker.setdefault("artifacts", []).append(
        {
            "name": "public/logs",
            "path": "{workdir}/logs/".format(**run),
            "type": "directory",
        }
    )
    worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
    docker_worker_add_artifacts(config, job, taskdesc)
    env = worker.setdefault("env", {})
    env.update(
        {
            "WORKSPACE": "{workdir}/workspace".format(**run),
            "MOZHARNESS_CONFIG": " ".join(run.pop("config")),
            "MOZHARNESS_SCRIPT": run.pop("script"),
            "MH_BRANCH": config.params["project"],
            "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
            "MOZ_SOURCE_REPO": get_branch_repo(config),
            "MH_BUILD_POOL": "taskcluster",
            "MOZ_BUILD_DATE": config.params["moz_build_date"],
            "MOZ_SCM_LEVEL": config.params["level"],
            "PYTHONUNBUFFERED": "1",
        }
    )
    worker.setdefault("required-volumes", []).append(env["WORKSPACE"])
    if "actions" in run:
        env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions"))
    if "options" in run:
        env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options"))
    if "config-paths" in run:
        env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths"))
    if "custom-build-variant-cfg" in run:
        env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg")
    extra_config = run.pop("extra-config", {})
    extra_config["objdir"] = "obj-build"
    env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
        json.dumps(extra_config, sort_keys=True)
    )
    if "job-script" in run:
        # Only read here; the key is popped (with default) below when
        # building the command.
        env["JOB_SCRIPT"] = run["job-script"]
    if config.params.is_try():
        env["TRY_COMMIT_MSG"] = config.params["message"]
    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run.pop("keep-artifacts"):
        env["DIST_TARGET_UPLOADS"] = ""
        env["DIST_UPLOADS"] = ""
    # Xvfb
    if run.pop("need-xvfb"):
        env["NEED_XVFB"] = "true"
    else:
        env["NEED_XVFB"] = "false"
    # Retry if mozharness returns TBPL_RETRY
    worker["retry-exit-status"] = [4]
    setup_secrets(config, job, taskdesc)
    # Hand the job over to the generic run-task transform.
    run["using"] = "run-task"
    run["command"] = mozpath.join(
        "${GECKO_PATH}",
        run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
    )
    # Consume remaining mozharness-only keys so run-task validation passes.
    run.pop("secrets")
    run.pop("requires-signed-builds")
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build on docker-worker (legacy variant).

    Builds the worker command directly (run-task wrapping the misc script)
    and registers the task in the toolchain cache unless taskgraph.fast.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['chain-of-trust'] = True
    # If the task doesn't have a docker-image, set a default
    worker.setdefault('docker-image', {'in-tree': 'toolchain-build'})
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(artifact.get('name') == 'public/build'
               for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)
    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
        'MOZ_FETCHES_DIR': workspace,
        'GECKO_PATH': gecko_path,
    })
    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = '{}/mach python '.format(gecko_path)
    else:
        wrapper = ''
    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)
    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = ['--gecko-sparse-profile=build/sparse-profiles/{}'
                          .format(run['sparse-profile'])]
    # run-task prepares the (optionally sparse) checkout, then the misc
    # script runs relative to the workdir.
    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout={}'.format(gecko_path),
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd {} && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            run['workdir'], wrapper, run['script'], args)
    ]
    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build on docker-worker (oldest variant).

    Uses hard-coded /builds/worker paths and registers cache optimization
    via add_optimization rather than taskdesc['cache'].
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['chain-of-trust'] = True
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(artifact.get('name') == 'public/build'
               for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })
    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = 'workspace/build/src/mach python '
    else:
        wrapper = ''
    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)
    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = ['--sparse-profile',
                          'build/sparse-profiles/{}'.format(
                              run['sparse-profile'])]
    # run-task prepares the (optionally sparse) checkout, then the misc
    # script runs relative to /builds/worker.
    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args)
    ]
    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def docker_macos_sdk_fetch(config, job, taskdesc):
    """
    Facilitates downloading the macOS-11 SDK from the Firefox private
    artifact build. This gets around the requirement of using a macOS worker
    with Xcode installed to create the SDK tar file and instead downloads
    one that was already generated.

    Previously, toolchain artifacts with encumbered licenses such as the
    macOS SDK were made available to build jobs as private tooltool
    artifacts.

    There is a possibility of a race condition where an SDK has been updated
    but the job is not completed. In this case, the previous version would
    be found when the Thunderbird decision task runs and that will be used
    for the build jobs that require it. Once the Firefox SDK build job
    completes, the index is updated and the next Thunderbird build will use
    it. As the SDK itself does not get updated very often, this should not
    pose a problem.
    """
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True
    # If the task doesn't have a docker-image, set a default
    worker.setdefault("docker-image", {"in-tree": "deb8-toolchain-build"})
    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not any(
            artifact.get("name") == "public/build"
            for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)
    upload_dir = "{workdir}/artifacts".format(**run)
    # NOTE(review): `attributes["gecko_index"]` / `gecko_artifact_path` are
    # read below but never set here — presumably provided by the job's
    # attributes upstream; confirm.
    attributes = taskdesc.setdefault("attributes", {})
    # Level 1 builds can use level 2 & 3 toolchains if available
    sdk_task_id = None
    for level in reversed(range(int(config.params["level"]), 4)):
        gecko_index = attributes["gecko_index"].format(level=level)
        try:
            sdk_task_id = taskcluster.find_task_id(gecko_index)
            break
        except KeyError:
            # Index miss at this level; try the next lower level.
            continue
    if sdk_task_id is None:
        raise KeyError("toolchain index path {} not found".format(gecko_index))
    # Sets the MOZ_FETCHES environment variable with the task id and
    # artifact path of the gecko artifact. This bypasses the usual setup
    # done in taskgraph/transforms/job/__init__.py.
    moz_fetches = {
        "task-reference": ensure_text(
            json.dumps([{
                "artifact": attributes["gecko_artifact_path"],
                "extract": False,
                "task": sdk_task_id,
            }]))
    }
    # fetch-content downloads files to MOZ_FETCHES_DIR, so we set it to
    # UPLOAD_DIR so that it's found by the automatic artifact upload done
    # at the end of the "build".
    # NOTE(review): assumes `env` was already created on the worker by an
    # earlier transform — this raises KeyError otherwise; confirm upstream.
    env = worker["env"]
    env.update({
        "MOZ_SCM_LEVEL": config.params["level"],
        "MOZ_FETCHES": moz_fetches,
        "MOZ_FETCHES_DIR": upload_dir,
    })
    # Consume toolchain keys so they do not leak into run-task validation.
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")
    if not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": get_digest_data(config, run, taskdesc),
        }
    # The "build" is just a fetch of the pre-built SDK artifact.
    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    run["command"] = ["/builds/worker/bin/fetch-content", "task-artifacts"]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])